From a6e482484eb8c633caf26e93ebb0774ffe98eac7 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 26 May 2023 11:30:40 +0100 Subject: [PATCH 01/21] Allow for knob values to be validated against multiple values. --- .../plugins/publish/validate_write_nodes.py | 44 ++++++++++++------- 1 file changed, 28 insertions(+), 16 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index aeecea655f..2a925fbeff 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -1,3 +1,5 @@ +from collections import defaultdict + import pyblish.api from openpype.pipeline.publish import get_errored_instances_from_context from openpype.hosts.nuke.api.lib import ( @@ -87,6 +89,11 @@ class ValidateNukeWriteNode( correct_data )) + # Collect key values of same type in a list. + values_by_name = defaultdict(list) + for knob_data in correct_data["knobs"]: + values_by_name[knob_data["name"]].append(knob_data["value"]) + for knob_data in correct_data["knobs"]: knob_type = knob_data["type"] self.log.debug("__ knob_type: {}".format( @@ -105,28 +112,33 @@ class ValidateNukeWriteNode( ) key = knob_data["name"] - value = knob_data["value"] + values = values_by_name[key] node_value = write_node[key].value() # fix type differences - if type(node_value) in (int, float): - try: - if isinstance(value, list): - value = color_gui_to_int(value) - else: - value = float(value) - node_value = float(node_value) - except ValueError: - value = str(value) - else: - value = str(value) - node_value = str(node_value) + fixed_values = [] + for value in values: + if type(node_value) in (int, float): + try: - self.log.debug("__ key: {} | value: {}".format( - key, value + if isinstance(value, list): + value = color_gui_to_int(value) + else: + value = float(value) + node_value = float(node_value) + except ValueError: + value = str(value) + else: + value = str(value) + node_value = str(node_value) + + fixed_values.append(value) + + self.log.debug("__ key: {} | values: {}".format( + key, fixed_values )) if ( - node_value != value + node_value not in fixed_values and key != "file" and key != "tile_color" ): From 0f8cc0301fd2e44259c524eefe39b7db733fecaa Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 23 Aug 2023 14:47:10 +0200 Subject: [PATCH 02/21] Nuke: fixing missing `instance_id` --- openpype/hosts/nuke/api/pipeline.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 65b4b91323..a1d290646c 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -543,6 +543,9 @@ def list_instances(creator_id=None): For SubsetManager + Args: + creator_id (Optional[str]): creator identifier + Returns: (list) of dictionaries matching instances format """ @@ -575,10 +578,13 @@ def list_instances(creator_id=None): if creator_id and instance_data["creator_identifier"] != creator_id: continue - if instance_data["instance_id"] in instance_ids: + instance_id = instance_data.get("instance_id") + if not instance_id: + pass + elif instance_id in instance_ids: instance_data.pop("instance_id") else: - instance_ids.add(instance_data["instance_id"]) + instance_ids.add(instance_id) # node name could change, so update subset name data _update_subset_name_data(instance_data, node) From fe5dc20877355a8b23a54c359dedca7111e6a00e Mon Sep 17 00:00:00 2001 
From: Jakub Jezek Date: Wed, 23 Aug 2023 17:31:46 +0200 Subject: [PATCH 03/21] traypublisher: adding preset for audio product --- .../project_settings/traypublisher.json | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index dda958ebcd..7f7b7d1452 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -256,6 +256,23 @@ "allow_multiple_items": true, "allow_version_control": false, "extensions": [] + }, + { + "family": "audio", + "identifier": "", + "label": "Audio ", + "icon": "fa5s.file-audio", + "default_variants": [ + "Main" + ], + "description": "Audio product", + "detailed_description": "Audio files for review or final delivery", + "allow_sequences": false, + "allow_multiple_items": false, + "allow_version_control": false, + "extensions": [ + ".wav" + ] } ], "editorial_creators": { From 3f1fb736a60d5ae88e4a425b2d2ce75f3554dae4 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 28 Aug 2023 18:45:43 +0800 Subject: [PATCH 04/21] make sure arnold and other renderers not fallbacking to workfile --- .../deadline/plugins/publish/submit_max_deadline.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_max_deadline.py b/openpype/modules/deadline/plugins/publish/submit_max_deadline.py index 8e05582962..f1127f60f2 100644 --- a/openpype/modules/deadline/plugins/publish/submit_max_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_max_deadline.py @@ -12,7 +12,9 @@ from openpype.pipeline import ( legacy_io, OpenPypePyblishPluginMixin ) -from openpype.settings import get_project_settings +from openpype.pipeline.publish.lib import ( + replace_with_published_scene_path +) from openpype.hosts.max.api.lib import ( get_current_renderer, get_multipass_setting @@ -247,7 +249,12 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, if instance.data["renderer"] == "Redshift_Renderer": self.log.debug("Using Redshift...published scene wont be used..") replace_in_path = False - return replace_in_path + return replace_with_published_scene_path( + instance, replace_in_path) + else: + return replace_with_published_scene_path( + instance, replace_in_path) + @staticmethod def _iter_expected_files(exp): From 6e50d1a814c3765352038c5bf640bd9bc5409f43 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 28 Aug 2023 12:47:43 +0200 Subject: [PATCH 05/21] Fix log message - actually provide plugin name after "Plugin" --- openpype/pipeline/publish/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index ada12800a9..810e1dd342 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -465,7 +465,7 @@ def apply_plugin_settings_automatically(plugin, settings, logger=None): for option, value in settings.items(): if logger: logger.debug("Plugin {} - Attr: {} -> {}".format( - option, value, plugin.__name__ + plugin.__name__, option, value )) setattr(plugin, option, value) From a59de7db311bfeca3114c51920eed9f6098dbc2c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 28 Aug 2023 18:50:20 +0800 Subject: [PATCH 06/21] clean up --- .../deadline/plugins/publish/submit_max_deadline.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git 
a/openpype/modules/deadline/plugins/publish/submit_max_deadline.py b/openpype/modules/deadline/plugins/publish/submit_max_deadline.py index f1127f60f2..d8725e853c 100644 --- a/openpype/modules/deadline/plugins/publish/submit_max_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_max_deadline.py @@ -249,12 +249,8 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, if instance.data["renderer"] == "Redshift_Renderer": self.log.debug("Using Redshift...published scene wont be used..") replace_in_path = False - return replace_with_published_scene_path( - instance, replace_in_path) - else: - return replace_with_published_scene_path( - instance, replace_in_path) - + return replace_with_published_scene_path( + instance, replace_in_path) @staticmethod def _iter_expected_files(exp): From f95c4f27cee49f2281e427a6b855ca3364c99291 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 28 Aug 2023 13:48:20 +0200 Subject: [PATCH 07/21] Use `logging` module style formatting --- openpype/pipeline/publish/lib.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 810e1dd342..815761cd0f 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -464,9 +464,8 @@ def apply_plugin_settings_automatically(plugin, settings, logger=None): for option, value in settings.items(): if logger: - logger.debug("Plugin {} - Attr: {} -> {}".format( - plugin.__name__, option, value - )) + logger.debug("Plugin %s - Attr: %s -> %s", + plugin.__name__, option, value) setattr(plugin, option, value) From 3eb2cc21b2ae34405f6eda2539c73384e2e948b4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 29 Aug 2023 12:01:29 +0200 Subject: [PATCH 08/21] Update ayon-python-api (#5512) * query asset only if asset id is available * updated ayon api * fix subsets arguments --- openpype/client/server/entities.py | 8 +- openpype/tools/utils/tasks_widget.py | 2 +- .../vendor/python/common/ayon_api/__init__.py | 10 + .../vendor/python/common/ayon_api/_api.py | 20 ++ .../python/common/ayon_api/constants.py | 7 +- .../python/common/ayon_api/graphql_queries.py | 6 +- .../python/common/ayon_api/server_api.py | 236 +++++++++++++++--- .../vendor/python/common/ayon_api/version.py | 2 +- 8 files changed, 248 insertions(+), 43 deletions(-) diff --git a/openpype/client/server/entities.py b/openpype/client/server/entities.py index 9579f13add..39322627bb 100644 --- a/openpype/client/server/entities.py +++ b/openpype/client/server/entities.py @@ -83,10 +83,10 @@ def _get_subsets( project_name, subset_ids, subset_names, - folder_ids, - names_by_folder_ids, - active, - fields + folder_ids=folder_ids, + names_by_folder_ids=names_by_folder_ids, + active=active, + fields=fields, ): yield convert_v4_subset_to_v3(subset) diff --git a/openpype/tools/utils/tasks_widget.py b/openpype/tools/utils/tasks_widget.py index 8c0505223e..b554ed50d3 100644 --- a/openpype/tools/utils/tasks_widget.py +++ b/openpype/tools/utils/tasks_widget.py @@ -75,7 +75,7 @@ class TasksModel(QtGui.QStandardItemModel): def set_asset_id(self, asset_id): asset_doc = None - if self._context_is_valid(): + if asset_id and self._context_is_valid(): project_name = self._get_current_project() asset_doc = get_asset_by_id( project_name, asset_id, fields=["data.tasks"] diff --git a/openpype/vendor/python/common/ayon_api/__init__.py b/openpype/vendor/python/common/ayon_api/__init__.py index 
027e7a3da2..dc3d361f46 100644 --- a/openpype/vendor/python/common/ayon_api/__init__.py +++ b/openpype/vendor/python/common/ayon_api/__init__.py @@ -48,6 +48,11 @@ from ._api import ( patch, delete, + get_timeout, + set_timeout, + get_max_retries, + set_max_retries, + get_event, get_events, dispatch_event, @@ -245,6 +250,11 @@ __all__ = ( "patch", "delete", + "get_timeout", + "set_timeout", + "get_max_retries", + "set_max_retries", + "get_event", "get_events", "dispatch_event", diff --git a/openpype/vendor/python/common/ayon_api/_api.py b/openpype/vendor/python/common/ayon_api/_api.py index 1d7b1837f1..22e137d6e5 100644 --- a/openpype/vendor/python/common/ayon_api/_api.py +++ b/openpype/vendor/python/common/ayon_api/_api.py @@ -474,6 +474,26 @@ def delete(*args, **kwargs): return con.delete(*args, **kwargs) +def get_timeout(*args, **kwargs): + con = get_server_api_connection() + return con.get_timeout(*args, **kwargs) + + +def set_timeout(*args, **kwargs): + con = get_server_api_connection() + return con.set_timeout(*args, **kwargs) + + +def get_max_retries(*args, **kwargs): + con = get_server_api_connection() + return con.get_max_retries(*args, **kwargs) + + +def set_max_retries(*args, **kwargs): + con = get_server_api_connection() + return con.set_max_retries(*args, **kwargs) + + def get_event(*args, **kwargs): con = get_server_api_connection() return con.get_event(*args, **kwargs) diff --git a/openpype/vendor/python/common/ayon_api/constants.py b/openpype/vendor/python/common/ayon_api/constants.py index eb1ace0590..eaeb77b607 100644 --- a/openpype/vendor/python/common/ayon_api/constants.py +++ b/openpype/vendor/python/common/ayon_api/constants.py @@ -1,18 +1,21 @@ # Environments where server url and api key are stored for global connection SERVER_URL_ENV_KEY = "AYON_SERVER_URL" SERVER_API_ENV_KEY = "AYON_API_KEY" +SERVER_TIMEOUT_ENV_KEY = "AYON_SERVER_TIMEOUT" +SERVER_RETRIES_ENV_KEY = "AYON_SERVER_RETRIES" + # Backwards compatibility SERVER_TOKEN_ENV_KEY = SERVER_API_ENV_KEY # --- User --- DEFAULT_USER_FIELDS = { - "roles", + "accessGroups", + "defaultAccessGroups", "name", "isService", "isManager", "isGuest", "isAdmin", - "defaultRoles", "createdAt", "active", "hasPassword", diff --git a/openpype/vendor/python/common/ayon_api/graphql_queries.py b/openpype/vendor/python/common/ayon_api/graphql_queries.py index f31134a04d..2435fc8a17 100644 --- a/openpype/vendor/python/common/ayon_api/graphql_queries.py +++ b/openpype/vendor/python/common/ayon_api/graphql_queries.py @@ -247,9 +247,11 @@ def products_graphql_query(fields): query = GraphQlQuery("ProductsQuery") project_name_var = query.add_variable("projectName", "String!") - folder_ids_var = query.add_variable("folderIds", "[String!]") product_ids_var = query.add_variable("productIds", "[String!]") product_names_var = query.add_variable("productNames", "[String!]") + folder_ids_var = query.add_variable("folderIds", "[String!]") + product_types_var = query.add_variable("productTypes", "[String!]") + statuses_var = query.add_variable("statuses", "[String!]") project_field = query.add_field("project") project_field.set_filter("name", project_name_var) @@ -258,6 +260,8 @@ def products_graphql_query(fields): products_field.set_filter("ids", product_ids_var) products_field.set_filter("names", product_names_var) products_field.set_filter("folderIds", folder_ids_var) + products_field.set_filter("productTypes", product_types_var) + products_field.set_filter("statuses", statuses_var) nested_fields = fields_to_dict(set(fields)) 
add_links_fields(products_field, nested_fields) diff --git a/openpype/vendor/python/common/ayon_api/server_api.py b/openpype/vendor/python/common/ayon_api/server_api.py index f2689e88dc..511a239a83 100644 --- a/openpype/vendor/python/common/ayon_api/server_api.py +++ b/openpype/vendor/python/common/ayon_api/server_api.py @@ -2,6 +2,7 @@ import os import re import io import json +import time import logging import collections import platform @@ -26,6 +27,8 @@ except ImportError: from json import JSONDecodeError as RequestsJSONDecodeError from .constants import ( + SERVER_TIMEOUT_ENV_KEY, + SERVER_RETRIES_ENV_KEY, DEFAULT_PRODUCT_TYPE_FIELDS, DEFAULT_PROJECT_FIELDS, DEFAULT_FOLDER_FIELDS, @@ -127,6 +130,8 @@ class RestApiResponse(object): @property def text(self): + if self._response is None: + return self.detail return self._response.text @property @@ -135,6 +140,8 @@ class RestApiResponse(object): @property def headers(self): + if self._response is None: + return {} return self._response.headers @property @@ -148,6 +155,8 @@ class RestApiResponse(object): @property def content(self): + if self._response is None: + return b"" return self._response.content @property @@ -339,7 +348,11 @@ class ServerAPI(object): variable value 'AYON_CERT_FILE' by default. create_session (Optional[bool]): Create session for connection if token is available. Default is True. + timeout (Optional[float]): Timeout for requests. + max_retries (Optional[int]): Number of retries for requests. """ + _default_timeout = 10.0 + _default_max_retries = 3 def __init__( self, @@ -352,6 +365,8 @@ class ServerAPI(object): ssl_verify=None, cert=None, create_session=True, + timeout=None, + max_retries=None, ): if not base_url: raise ValueError("Invalid server URL {}".format(str(base_url))) @@ -370,6 +385,13 @@ class ServerAPI(object): ) self._sender = sender + self._timeout = None + self._max_retries = None + + # Set timeout and max retries based on passed values + self.set_timeout(timeout) + self.set_max_retries(max_retries) + if ssl_verify is None: # Custom AYON env variable for CA file or 'True' # - that should cover most default behaviors in 'requests' @@ -474,6 +496,87 @@ class ServerAPI(object): ssl_verify = property(get_ssl_verify, set_ssl_verify) cert = property(get_cert, set_cert) + @classmethod + def get_default_timeout(cls): + """Default value for requests timeout. + + First looks for environment variable SERVER_TIMEOUT_ENV_KEY which + can affect timeout value. If not available then use class + attribute '_default_timeout'. + + Returns: + float: Timeout value in seconds. + """ + + try: + return float(os.environ.get(SERVER_TIMEOUT_ENV_KEY)) + except (ValueError, TypeError): + pass + + return cls._default_timeout + + @classmethod + def get_default_max_retries(cls): + """Default value for requests max retries. + + First looks for environment variable SERVER_RETRIES_ENV_KEY, which + can affect max retries value. If not available then use class + attribute '_default_max_retries'. + + Returns: + int: Max retries value. + """ + + try: + return int(os.environ.get(SERVER_RETRIES_ENV_KEY)) + except (ValueError, TypeError): + pass + + return cls._default_max_retries + + def get_timeout(self): + """Current value for requests timeout. + + Returns: + float: Timeout value in seconds. + """ + + return self._timeout + + def set_timeout(self, timeout): + """Change timeout value for requests. + + Args: + timeout (Union[float, None]): Timeout value in seconds. 
+ """ + + if timeout is None: + timeout = self.get_default_timeout() + self._timeout = float(timeout) + + def get_max_retries(self): + """Current value for requests max retries. + + Returns: + int: Max retries value. + """ + + return self._max_retries + + def set_max_retries(self, max_retries): + """Change max retries value for requests. + + Args: + max_retries (Union[int, None]): Max retries value. + """ + + if max_retries is None: + max_retries = self.get_default_max_retries() + self._max_retries = int(max_retries) + + timeout = property(get_timeout, set_timeout) + max_retries = property(get_max_retries, set_max_retries) + @property def access_token(self): """Access token used for authorization to server. @@ -890,9 +993,17 @@ class ServerAPI(object): for attr, filter_value in filters.items(): query.set_variable_value(attr, filter_value) + # Backwards compatibility for server 0.3.x + # - will be removed in future releases + major, minor, _, _, _ = self.server_version_tuple + access_groups_field = "accessGroups" + if major == 0 and minor <= 3: + access_groups_field = "roles" + for parsed_data in query.continuous_query(self): for user in parsed_data["users"]: - user["roles"] = json.loads(user["roles"]) + user[access_groups_field] = json.loads( + user[access_groups_field]) yield user def get_user(self, username=None): @@ -1004,6 +1115,10 @@ class ServerAPI(object): logout_from_server(self._base_url, self._access_token) def _do_rest_request(self, function, url, **kwargs): + kwargs.setdefault("timeout", self.timeout) + max_retries = kwargs.get("max_retries", self.max_retries) + if max_retries < 1: + max_retries = 1 if self._session is None: # Validate token if was not yet validated # - ignore validation if we're in middle of @@ -1023,38 +1138,54 @@ class ServerAPI(object): elif isinstance(function, RequestType): function = self._session_functions_mapping[function] - try: - response = function(url, **kwargs) + response = None + new_response = None + for _ in range(max_retries): + try: + response = function(url, **kwargs) + break + + except ConnectionRefusedError: + # Server may be restarting + new_response = RestApiResponse( + None, + {"detail": "Unable to connect the server. Connection refused"} + ) + except requests.exceptions.Timeout: + # Connection timed out + new_response = RestApiResponse( + None, + {"detail": "Connection timed out."} + ) + except requests.exceptions.ConnectionError: + # Other connection error (ssl, etc) - does not make sense to + # try call server again + new_response = RestApiResponse( + None, + {"detail": "Unable to connect the server. Connection error"} + ) + break + + time.sleep(0.1) + + if new_response is not None: + return new_response + + content_type = response.headers.get("Content-Type") + if content_type == "application/json": + try: + new_response = RestApiResponse(response) + except JSONDecodeError: + new_response = RestApiResponse( + None, + { + "detail": "The response is not a JSON: {}".format( + response.text) + } + ) - except ConnectionRefusedError: - new_response = RestApiResponse( - None, - {"detail": "Unable to connect the server. Connection refused"} - ) - except requests.exceptions.ConnectionError: - new_response = RestApiResponse( - None, - {"detail": "Unable to connect the server. 
Connection error"} - ) else: - content_type = response.headers.get("Content-Type") - if content_type == "application/json": - try: - new_response = RestApiResponse(response) - except JSONDecodeError: - new_response = RestApiResponse( - None, - { - "detail": "The response is not a JSON: {}".format( - response.text) - } - ) - - elif content_type in ("image/jpeg", "image/png"): - new_response = RestApiResponse(response) - - else: - new_response = RestApiResponse(response) + new_response = RestApiResponse(response) self.log.debug("Response {}".format(str(new_response))) return new_response @@ -1747,7 +1878,15 @@ class ServerAPI(object): entity_type_defaults = DEFAULT_WORKFILE_INFO_FIELDS elif entity_type == "user": - entity_type_defaults = DEFAULT_USER_FIELDS + entity_type_defaults = set(DEFAULT_USER_FIELDS) + # Backwards compatibility for server 0.3.x + # - will be removed in future releases + major, minor, _, _, _ = self.server_version_tuple + if major == 0 and minor <= 3: + entity_type_defaults.discard("accessGroups") + entity_type_defaults.discard("defaultAccessGroups") + entity_type_defaults.add("roles") + entity_type_defaults.add("defaultRoles") else: raise ValueError("Unknown entity type \"{}\"".format(entity_type)) @@ -2124,7 +2263,12 @@ class ServerAPI(object): server. """ - result = self.get("desktop/dependency_packages") + endpoint = "desktop/dependencyPackages" + major, minor, _, _, _ = self.server_version_tuple + if major == 0 and minor <= 3: + endpoint = "desktop/dependency_packages" + + result = self.get(endpoint) result.raise_for_status() return result.data @@ -3810,6 +3954,8 @@ class ServerAPI(object): product_ids=None, product_names=None, folder_ids=None, + product_types=None, + statuses=None, names_by_folder_ids=None, active=True, fields=None, @@ -3828,6 +3974,10 @@ class ServerAPI(object): filtering. folder_ids (Optional[Iterable[str]]): Ids of task parents. Use 'None' if folder is direct child of project. + product_types (Optional[Iterable[str]]): Product types used for + filtering. + statuses (Optional[Iterable[str]]): Product statuses used for + filtering. names_by_folder_ids (Optional[dict[str, Iterable[str]]]): Product name filtering by folder id. active (Optional[bool]): Filter active/inactive products. @@ -3862,6 +4012,18 @@ class ServerAPI(object): if not filter_folder_ids: return + filter_product_types = None + if product_types is not None: + filter_product_types = set(product_types) + if not filter_product_types: + return + + filter_statuses = None + if statuses is not None: + filter_statuses = set(statuses) + if not filter_statuses: + return + # This will disable 'folder_ids' and 'product_names' filters # - maybe could be enhanced in future? 
if names_by_folder_ids is not None: @@ -3881,7 +4043,7 @@ class ServerAPI(object): fields = set(fields) | {"id"} if "attrib" in fields: fields.remove("attrib") - fields |= self.get_attributes_fields_for_type("folder") + fields |= self.get_attributes_fields_for_type("product") else: fields = self.get_default_fields_for_type("product") @@ -3908,6 +4070,12 @@ class ServerAPI(object): if filter_folder_ids: filters["folderIds"] = list(filter_folder_ids) + if filter_product_types: + filters["productTypes"] = list(filter_product_types) + + if filter_statuses: + filters["statuses"] = list(filter_statuses) + if product_ids: filters["productIds"] = list(product_ids) diff --git a/openpype/vendor/python/common/ayon_api/version.py b/openpype/vendor/python/common/ayon_api/version.py index df841e0829..f3826a6407 100644 --- a/openpype/vendor/python/common/ayon_api/version.py +++ b/openpype/vendor/python/common/ayon_api/version.py @@ -1,2 +1,2 @@ """Package declaring Python API for Ayon server.""" -__version__ = "0.3.5" +__version__ = "0.4.1" From 61a8ff26f0d42577d4f19346242270ab56a75505 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Aug 2023 15:06:52 +0200 Subject: [PATCH 09/21] General: Fix Validate Publish Dir Validator (#5534) * Fix using wrong key * Update docstrings --- openpype/plugins/publish/validate_publish_dir.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/openpype/plugins/publish/validate_publish_dir.py b/openpype/plugins/publish/validate_publish_dir.py index 2f41127548..ad5fd34434 100644 --- a/openpype/plugins/publish/validate_publish_dir.py +++ b/openpype/plugins/publish/validate_publish_dir.py @@ -7,12 +7,12 @@ from openpype.pipeline.publish import ( class ValidatePublishDir(pyblish.api.InstancePlugin): - """Validates if 'publishDir' is a project directory + """Validates if files are being published into a project directory - 'publishDir' is collected based on publish templates. In specific cases - ('source' template) source folder of items is used as a 'publishDir', this - validates if it is inside any project dir for the project. - (eg. files are not published from local folder, unaccessible for studio' + In specific cases ('source' template - in place publishing) source folder + of published items is used as a regular `publish` dir. + This validates if it is inside any project dir for the project. + (eg. 
files are not published from local folder, inaccessible for studio') """ @@ -44,6 +44,8 @@ class ValidatePublishDir(pyblish.api.InstancePlugin): anatomy = instance.context.data["anatomy"] + # original_dirname must be convertable to rootless path + # in other case it is path inside of root folder for the project success, _ = anatomy.find_root_template_from_path(original_dirname) formatting_data = { @@ -56,11 +58,12 @@ class ValidatePublishDir(pyblish.api.InstancePlugin): formatting_data=formatting_data) def _get_template_name_from_instance(self, instance): + """Find template which will be used during integration.""" project_name = instance.context.data["projectName"] host_name = instance.context.data["hostName"] anatomy_data = instance.data["anatomyData"] family = anatomy_data["family"] - family = self.family_mapping.get("family") or family + family = self.family_mapping.get(family) or family task_info = anatomy_data.get("task") or {} return get_publish_template_name( From 6014cc6549d4869c81ff8cbe1acacfb30332db3a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 29 Aug 2023 16:16:31 +0200 Subject: [PATCH 10/21] Enhancement: Deadline plugins optimize, cleanup and fix optional support for validate deadline pools (#5531) * Fix optional support * Query deadline only once per url * Report both pools if both are invalid instead of only primary pool * Fix formatting in UI * Re-use existing implementation of `requests_get` * Cosmetics * Cache deadline url responses to avoid the need of request per instance * Only format error message when needed + convert to `KnownPublishError` * Allow deadline url per instance, similar to `ValidateDeadlineConnections` * Tweak grammar/readability * Fix title * Remove instance data from right side in Publish report since it's available in logs --- .../collect_deadline_server_from_instance.py | 14 +++-- .../publish/help/validate_deadline_pools.xml | 30 ++++----- .../publish/validate_deadline_connection.py | 34 ++++------ .../publish/validate_deadline_pools.py | 63 +++++++++++++------ 4 files changed, 79 insertions(+), 62 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py index eadfc3c83e..8a408d7f4f 100644 --- a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py +++ b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py @@ -8,6 +8,7 @@ attribute or using default server if that attribute doesn't exists. from maya import cmds import pyblish.api +from openpype.pipeline.publish import KnownPublishError class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): @@ -81,13 +82,14 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): if k in default_servers } - msg = ( - "\"{}\" server on instance is not enabled in project settings." - " Enabled project servers:\n{}".format( - instance_server, project_enabled_servers + if instance_server not in project_enabled_servers: + msg = ( + "\"{}\" server on instance is not enabled in project settings." 
+ " Enabled project servers:\n{}".format( + instance_server, project_enabled_servers + ) ) - ) - assert instance_server in project_enabled_servers, msg + raise KnownPublishError(msg) self.log.debug("Using project approved server.") return project_enabled_servers[instance_server] diff --git a/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml b/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml index 0e7d72910e..aa21df3734 100644 --- a/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml +++ b/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml @@ -1,31 +1,31 @@ - Scene setting + Deadline Pools - ## Invalid Deadline pools found +## Invalid Deadline pools found - Configured pools don't match what is set in Deadline. +Configured pools don't match available pools in Deadline. - {invalid_value_str} +### How to repair? - ### How to repair? +If your instance had deadline pools set on creation, remove or +change them. - If your instance had deadline pools set on creation, remove or - change them. +In other cases inform admin to change them in Settings. - In other cases inform admin to change them in Settings. +Available deadline pools: + +{pools_str} - Available deadline pools {pools_str}. - ### __Detailed Info__ +### __Detailed Info__ - This error is shown when deadline pool is not on Deadline anymore. It - could happen in case of republish old workfile which was created with - previous deadline pools, - or someone changed pools on Deadline side, but didn't modify Openpype - Settings. +This error is shown when a configured pool is not available on Deadline. It +can happen when publishing old workfiles which were created with previous +deadline pools, or someone changed the available pools in Deadline, +but didn't modify Openpype Settings to match the changes. 
\ No newline at end of file diff --git a/openpype/modules/deadline/plugins/publish/validate_deadline_connection.py b/openpype/modules/deadline/plugins/publish/validate_deadline_connection.py index a30401e7dc..a7b300beff 100644 --- a/openpype/modules/deadline/plugins/publish/validate_deadline_connection.py +++ b/openpype/modules/deadline/plugins/publish/validate_deadline_connection.py @@ -1,8 +1,7 @@ -import os -import requests - import pyblish.api +from openpype_modules.deadline.abstract_submit_deadline import requests_get + class ValidateDeadlineConnection(pyblish.api.InstancePlugin): """Validate Deadline Web Service is running""" @@ -12,34 +11,25 @@ class ValidateDeadlineConnection(pyblish.api.InstancePlugin): hosts = ["maya", "nuke"] families = ["renderlayer", "render"] + # cache + responses = {} + def process(self, instance): # get default deadline webservice url from deadline module deadline_url = instance.context.data["defaultDeadline"] # if custom one is set in instance, use that if instance.data.get("deadlineUrl"): deadline_url = instance.data.get("deadlineUrl") - self.log.info( - "We have deadline URL on instance {}".format( - deadline_url)) + self.log.debug( + "We have deadline URL on instance {}".format(deadline_url) + ) assert deadline_url, "Requires Deadline Webservice URL" - # Check response - response = self._requests_get(deadline_url) + if deadline_url not in self.responses: + self.responses[deadline_url] = requests_get(deadline_url) + + response = self.responses[deadline_url] assert response.ok, "Response must be ok" assert response.text.startswith("Deadline Web Service "), ( "Web service did not respond with 'Deadline Web Service'" ) - - def _requests_get(self, *args, **kwargs): - """ Wrapper for requests, disabling SSL certificate validation if - DONT_VERIFY_SSL environment variable is found. This is useful when - Deadline or Muster server are running with self-signed certificates - and their certificate is not added to trusted certificates on - client machines. - - WARNING: disabling SSL certificate validation is defeating one line - of defense SSL is providing and it is not recommended. 
- """ - if 'verify' not in kwargs: - kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa - return requests.get(*args, **kwargs) diff --git a/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py b/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py index 594f0ef866..949caff7d8 100644 --- a/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py +++ b/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py @@ -25,33 +25,58 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin, "maxrender"] optional = True + # cache + pools_per_url = {} + def process(self, instance): + if not self.is_active(instance.data): + return + if not instance.data.get("farm"): self.log.debug("Skipping local instance.") return - # get default deadline webservice url from deadline module - deadline_url = instance.context.data["defaultDeadline"] - self.log.info("deadline_url::{}".format(deadline_url)) - pools = DeadlineModule.get_deadline_pools(deadline_url, log=self.log) - self.log.info("pools::{}".format(pools)) - - formatting_data = { - "pools_str": ",".join(pools) - } + deadline_url = self.get_deadline_url(instance) + pools = self.get_pools(deadline_url) + invalid_pools = {} primary_pool = instance.data.get("primaryPool") if primary_pool and primary_pool not in pools: - msg = "Configured primary '{}' not present on Deadline".format( - instance.data["primaryPool"]) - formatting_data["invalid_value_str"] = msg - raise PublishXmlValidationError(self, msg, - formatting_data=formatting_data) + invalid_pools["primary"] = primary_pool secondary_pool = instance.data.get("secondaryPool") if secondary_pool and secondary_pool not in pools: - msg = "Configured secondary '{}' not present on Deadline".format( - instance.data["secondaryPool"]) - formatting_data["invalid_value_str"] = msg - raise PublishXmlValidationError(self, msg, - formatting_data=formatting_data) + invalid_pools["secondary"] = secondary_pool + + if invalid_pools: + message = "\n".join( + "{} pool '{}' not available on Deadline".format(key.title(), + pool) + for key, pool in invalid_pools.items() + ) + raise PublishXmlValidationError( + plugin=self, + message=message, + formatting_data={"pools_str": ", ".join(pools)} + ) + + def get_deadline_url(self, instance): + # get default deadline webservice url from deadline module + deadline_url = instance.context.data["defaultDeadline"] + if instance.data.get("deadlineUrl"): + # if custom one is set in instance, use that + deadline_url = instance.data.get("deadlineUrl") + return deadline_url + + def get_pools(self, deadline_url): + if deadline_url not in self.pools_per_url: + self.log.debug( + "Querying available pools for Deadline url: {}".format( + deadline_url) + ) + pools = DeadlineModule.get_deadline_pools(deadline_url, + log=self.log) + self.log.info("Available pools: {}".format(pools)) + self.pools_per_url[deadline_url] = pools + + return self.pools_per_url[deadline_url] From c157f74b498eb5ace1822792b5813e964ac79ebc Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 29 Aug 2023 16:45:09 +0200 Subject: [PATCH 11/21] Fix double spaces in message (#5190) --- openpype/plugins/publish/validate_version.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/validate_version.py b/openpype/plugins/publish/validate_version.py index 2b919a3119..84d52fab73 100644 --- a/openpype/plugins/publish/validate_version.py +++ b/openpype/plugins/publish/validate_version.py @@ -25,16 
+25,16 @@ class ValidateVersion(pyblish.api.InstancePlugin): # TODO: Remove full non-html version upon drop of old publisher msg = ( "Version '{0}' from instance '{1}' that you are " - " trying to publish is lower or equal to an existing version " - " in the database. Version in database: '{2}'." + "trying to publish is lower or equal to an existing version " + "in the database. Version in database: '{2}'." "Please version up your workfile to a higher version number " "than: '{2}'." ).format(version, instance.data["name"], latest_version) msg_html = ( "Version {0} from instance {1} that you are " - " trying to publish is lower or equal to an existing version " - " in the database. Version in database: {2}.
" + "trying to publish is lower or equal to an existing version " + "in the database. Version in database: {2}.
" "Please version up your workfile to a higher version number " "than: {2}." ).format(version, instance.data["name"], latest_version) From e56d3530cb7668bf92b78a61be61508b23ee89fb Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 29 Aug 2023 17:31:49 +0200 Subject: [PATCH 12/21] Chore: Queued event system (#5514) * implemented queued event system * implemented basic tests --- openpype/lib/events.py | 90 +++++++++++++++++++- tests/unit/openpype/lib/test_event_system.py | 83 ++++++++++++++++++ 2 files changed, 171 insertions(+), 2 deletions(-) create mode 100644 tests/unit/openpype/lib/test_event_system.py diff --git a/openpype/lib/events.py b/openpype/lib/events.py index dca58fcf93..496b765a05 100644 --- a/openpype/lib/events.py +++ b/openpype/lib/events.py @@ -3,6 +3,7 @@ import os import re import copy import inspect +import collections import logging import weakref from uuid import uuid4 @@ -340,8 +341,8 @@ class EventSystem(object): event.emit() return event - def emit_event(self, event): - """Emit event object. + def _process_event(self, event): + """Process event topic and trigger callbacks. Args: event (Event): Prepared event with topic and data. @@ -356,6 +357,91 @@ class EventSystem(object): for callback in invalid_callbacks: self._registered_callbacks.remove(callback) + def emit_event(self, event): + """Emit event object. + + Args: + event (Event): Prepared event with topic and data. + """ + + self._process_event(event) + + +class QueuedEventSystem(EventSystem): + """Events are automatically processed in queue. + + If callback triggers another event, the event is not processed until + all callbacks of previous event are processed. + + Allows to implement custom event process loop by changing 'auto_execute'. + + Note: + This probably should be default behavior of 'EventSystem'. Changing it + now could cause problems in existing code. + + Args: + auto_execute (Optional[bool]): If 'True', events are processed + automatically. Custom loop calling 'process_next_event' + must be implemented when set to 'False'. + """ + + def __init__(self, auto_execute=True): + super(QueuedEventSystem, self).__init__() + self._event_queue = collections.deque() + self._current_event = None + self._auto_execute = auto_execute + + def __len__(self): + return self.count() + + def count(self): + """Get number of events in queue. + + Returns: + int: Number of events in queue. + """ + + return len(self._event_queue) + + def process_next_event(self): + """Process next event in queue. + + Should be used only if 'auto_execute' is set to 'False'. Only single + event is processed. + + Returns: + Union[Event, None]: Processed event. + """ + + if self._current_event is not None: + raise ValueError("An event is already in progress.") + + if not self._event_queue: + return None + event = self._event_queue.popleft() + self._current_event = event + self._process_event(event) + self._current_event = None + return event + + def emit_event(self, event): + """Emit event object. + + Args: + event (Event): Prepared event with topic and data. + """ + + if not self._auto_execute or self._current_event is not None: + self._event_queue.append(event) + return + + self._event_queue.append(event) + while self._event_queue: + event = self._event_queue.popleft() + self._current_event = event + self._process_event(event) + self._current_event = None + class GlobalEventSystem: """Event system living in global scope of process. 
diff --git a/tests/unit/openpype/lib/test_event_system.py b/tests/unit/openpype/lib/test_event_system.py new file mode 100644 index 0000000000..aa3f929065 --- /dev/null +++ b/tests/unit/openpype/lib/test_event_system.py @@ -0,0 +1,83 @@ +from openpype.lib.events import EventSystem, QueuedEventSystem + + +def test_default_event_system(): + output = [] + expected_output = [3, 2, 1] + event_system = EventSystem() + + def callback_1(): + event_system.emit("topic.2", {}, None) + output.append(1) + + def callback_2(): + event_system.emit("topic.3", {}, None) + output.append(2) + + def callback_3(): + output.append(3) + + event_system.add_callback("topic.1", callback_1) + event_system.add_callback("topic.2", callback_2) + event_system.add_callback("topic.3", callback_3) + + event_system.emit("topic.1", {}, None) + + assert output == expected_output, ( + "Callbacks were not called in correct order") + + +def test_base_event_system_queue(): + output = [] + expected_output = [1, 2, 3] + event_system = QueuedEventSystem() + + def callback_1(): + event_system.emit("topic.2", {}, None) + output.append(1) + + def callback_2(): + event_system.emit("topic.3", {}, None) + output.append(2) + + def callback_3(): + output.append(3) + + event_system.add_callback("topic.1", callback_1) + event_system.add_callback("topic.2", callback_2) + event_system.add_callback("topic.3", callback_3) + + event_system.emit("topic.1", {}, None) + + assert output == expected_output, ( + "Callbacks were not called in correct order") + + +def test_manual_event_system_queue(): + output = [] + expected_output = [1, 2, 3] + event_system = QueuedEventSystem(auto_execute=False) + + def callback_1(): + event_system.emit("topic.2", {}, None) + output.append(1) + + def callback_2(): + event_system.emit("topic.3", {}, None) + output.append(2) + + def callback_3(): + output.append(3) + + event_system.add_callback("topic.1", callback_1) + event_system.add_callback("topic.2", callback_2) + event_system.add_callback("topic.3", callback_3) + + event_system.emit("topic.1", {}, None) + + while True: + if event_system.process_next_event() is None: + break + + assert output == expected_output, ( + "Callbacks were not called in correct order") From 04145020f6c41014697ec171d9ede8a389506dce Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Aug 2023 17:35:48 +0200 Subject: [PATCH 13/21] Tests: fix unit tests (#5533) * Changed test zip file location * Updated tests and updated Unreal plugin Unreal plugin was previously ejected into general one, which was later removed as unnecessary. 
In Unreal plugin were kept fixed bugs from general one (slate issue, better name pattern for clique) * Updated raised exception type --- .../publish/validate_sequence_frames.py | 20 ++++++- .../publish/test_validate_sequence_frames.py | 53 +++++-------------- tests/unit/openpype/lib/test_delivery.py | 3 +- .../sync_server/test_site_operations.py | 9 ++-- 4 files changed, 39 insertions(+), 46 deletions(-) diff --git a/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py b/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py index 76bb25fac3..96485d5a2d 100644 --- a/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py +++ b/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py @@ -1,4 +1,6 @@ import clique +import os +import re import pyblish.api @@ -21,7 +23,19 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin): representations = instance.data.get("representations") for repr in representations: data = instance.data.get("assetEntity", {}).get("data", {}) - patterns = [clique.PATTERNS["frames"]] + repr_files = repr["files"] + if isinstance(repr_files, str): + continue + + ext = repr.get("ext") + if not ext: + _, ext = os.path.splitext(repr_files[0]) + elif not ext.startswith("."): + ext = ".{}".format(ext) + pattern = r"\D?(?P(?P0*)\d+){}$".format( + re.escape(ext)) + patterns = [pattern] + collections, remainder = clique.assemble( repr["files"], minimum_items=1, patterns=patterns) @@ -30,6 +44,10 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin): collection = collections[0] frames = list(collection.indexes) + if instance.data.get("slate"): + # Slate is not part of the frame range + frames = frames[1:] + current_range = (frames[0], frames[-1]) required_range = (data["clipIn"], data["clipOut"]) diff --git a/tests/unit/openpype/hosts/unreal/plugins/publish/test_validate_sequence_frames.py b/tests/unit/openpype/hosts/unreal/plugins/publish/test_validate_sequence_frames.py index 17e47c9f64..f472b8052a 100644 --- a/tests/unit/openpype/hosts/unreal/plugins/publish/test_validate_sequence_frames.py +++ b/tests/unit/openpype/hosts/unreal/plugins/publish/test_validate_sequence_frames.py @@ -19,7 +19,7 @@ import logging from pyblish.api import Instance as PyblishInstance from tests.lib.testing_classes import BaseTest -from openpype.plugins.publish.validate_sequence_frames import ( +from openpype.hosts.unreal.plugins.publish.validate_sequence_frames import ( ValidateSequenceFrames ) @@ -38,7 +38,13 @@ class TestValidateSequenceFrames(BaseTest): data = { "frameStart": 1001, "frameEnd": 1002, - "representations": [] + "representations": [], + "assetEntity": { + "data": { + "clipIn": 1001, + "clipOut": 1002, + } + } } yield Instance @@ -58,6 +64,7 @@ class TestValidateSequenceFrames(BaseTest): ] instance.data["representations"] = representations instance.data["frameEnd"] = 1001 + instance.data["assetEntity"]["data"]["clipOut"] = 1001 plugin.process(instance) @@ -84,49 +91,11 @@ class TestValidateSequenceFrames(BaseTest): plugin.process(instance) - @pytest.mark.parametrize("files", - [["Main_beauty.1001.v001.exr", - "Main_beauty.1002.v001.exr"]]) - def test_validate_sequence_frames_wrong_name(self, instance, - plugin, files): - # tests for names with number inside, caused clique failure before - representations = [ - { - "ext": "exr", - "files": files, - } - ] - instance.data["representations"] = representations - - with pytest.raises(AssertionError) as excinfo: - plugin.process(instance) - assert ("Must detect single collection" in - 
str(excinfo.value)) - - @pytest.mark.parametrize("files", - [["Main_beauty.v001.1001.ass.gz", - "Main_beauty.v001.1002.ass.gz"]]) - def test_validate_sequence_frames_possible_wrong_name( - self, instance, plugin, files): - # currently pattern fails on extensions with dots - representations = [ - { - "files": files, - } - ] - instance.data["representations"] = representations - - with pytest.raises(AssertionError) as excinfo: - plugin.process(instance) - assert ("Must not have remainder" in - str(excinfo.value)) - @pytest.mark.parametrize("files", [["Main_beauty.v001.1001.ass.gz", "Main_beauty.v001.1002.ass.gz"]]) def test_validate_sequence_frames__correct_ext( self, instance, plugin, files): - # currently pattern fails on extensions with dots representations = [ { "ext": "ass.gz", @@ -147,6 +116,7 @@ class TestValidateSequenceFrames(BaseTest): ] instance.data["representations"] = representations instance.data["frameEnd"] = 1003 + instance.data["assetEntity"]["data"]["clipOut"] = 1003 plugin.process(instance) @@ -160,6 +130,7 @@ class TestValidateSequenceFrames(BaseTest): ] instance.data["representations"] = representations instance.data["frameEnd"] = 1003 + instance.data["assetEntity"]["data"]["clipOut"] = 1003 with pytest.raises(ValueError) as excinfo: plugin.process(instance) @@ -175,6 +146,7 @@ class TestValidateSequenceFrames(BaseTest): ] instance.data["representations"] = representations instance.data["frameEnd"] = 1003 + instance.data["assetEntity"]["data"]["clipOut"] = 1003 with pytest.raises(AssertionError) as excinfo: plugin.process(instance) @@ -195,6 +167,7 @@ class TestValidateSequenceFrames(BaseTest): instance.data["slate"] = True instance.data["representations"] = representations instance.data["frameEnd"] = 1003 + instance.data["assetEntity"]["data"]["clipOut"] = 1003 plugin.process(instance) diff --git a/tests/unit/openpype/lib/test_delivery.py b/tests/unit/openpype/lib/test_delivery.py index 04a71655e3..f1e435f3f8 100644 --- a/tests/unit/openpype/lib/test_delivery.py +++ b/tests/unit/openpype/lib/test_delivery.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """Test suite for delivery functions.""" -from openpype.lib.delivery import collect_frames +from openpype.lib import collect_frames def test_collect_frames_multi_sequence(): @@ -153,4 +153,3 @@ def test_collect_frames_single_file(): print(ret) assert ret == expected, "Not matching" - diff --git a/tests/unit/openpype/modules/sync_server/test_site_operations.py b/tests/unit/openpype/modules/sync_server/test_site_operations.py index 6a861100a4..c4a83e33a6 100644 --- a/tests/unit/openpype/modules/sync_server/test_site_operations.py +++ b/tests/unit/openpype/modules/sync_server/test_site_operations.py @@ -12,16 +12,19 @@ removes temporary databases (?) 
""" import pytest +from bson.objectid import ObjectId from tests.lib.testing_classes import ModuleUnitTest -from bson.objectid import ObjectId + +from openpype.modules.sync_server.utils import SiteAlreadyPresentError + class TestSiteOperation(ModuleUnitTest): REPRESENTATION_ID = "60e578d0c987036c6a7b741d" - TEST_FILES = [("1eCwPljuJeOI8A3aisfOIBKKjcmIycTEt", + TEST_FILES = [("1FHE70Hi7y05LLT_1O3Y6jGxwZGXKV9zX", "test_site_operations.zip", '')] @pytest.fixture(scope="module") @@ -71,7 +74,7 @@ class TestSiteOperation(ModuleUnitTest): @pytest.mark.usefixtures("setup_sync_server_module") def test_add_site_again(self, dbcon, setup_sync_server_module): """Depends on test_add_site, must throw exception.""" - with pytest.raises(ValueError): + with pytest.raises(SiteAlreadyPresentError): setup_sync_server_module.add_site(self.TEST_PROJECT_NAME, self.REPRESENTATION_ID, site_name='test_site') From ed53ef12d5befa621965847016c375033e1b24b6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 29 Aug 2023 17:48:33 +0200 Subject: [PATCH 14/21] Chore: PowerShell script for docker build (#5535) * added powershell script to build using docker * fix empty variant * make sure build folder exists * added docker_build.ps1 to readme * Tweaked readme to include reason for docker_build.ps1 --------- Co-authored-by: Petr Kalis --- README.md | 4 ++ tools/docker_build.ps1 | 98 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 102 insertions(+) create mode 100644 tools/docker_build.ps1 diff --git a/README.md b/README.md index 6caed8061c..92f1cb62dc 100644 --- a/README.md +++ b/README.md @@ -144,6 +144,10 @@ sudo ./tools/docker_build.sh centos7 If all is successful, you'll find built OpenPype in `./build/` folder. +Docker build can be also started from Windows machine, just use `./tools/docker_build.ps1` instead of shell script. + +This could be used even for building linux build (with argument `centos7` or `debian`) + #### Manual build You will need [Python >= 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll also need [curl](https://curl.se) on systems that doesn't have one preinstalled. diff --git a/tools/docker_build.ps1 b/tools/docker_build.ps1 new file mode 100644 index 0000000000..392165288c --- /dev/null +++ b/tools/docker_build.ps1 @@ -0,0 +1,98 @@ +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$repo_root = (Get-Item $script_dir).parent.FullName + +$env:PSModulePath = $env:PSModulePath + ";$($repo_root)\tools\modules\powershell" + +function Exit-WithCode($exitcode) { + # Only exit this host process if it's a child of another PowerShell parent process... + $parentPID = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$PID" | Select-Object -Property ParentProcessId).ParentProcessId + $parentProcName = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$parentPID" | Select-Object -Property Name).Name + if ('powershell.exe' -eq $parentProcName) { $host.SetShouldExit($exitcode) } + + exit $exitcode +} + +function Restore-Cwd() { + $tmp_current_dir = Get-Location + if ("$tmp_current_dir" -ne "$current_dir") { + Write-Color -Text ">>> ", "Restoring current directory" -Color Green, Gray + Set-Location -Path $current_dir + } +} + +function Get-Container { + if (-not (Test-Path -PathType Leaf -Path "$($repo_root)\build\docker-image.id")) { + Write-Color -Text "!!! ", "Docker command failed, cannot find image id." 
-Color Red, Yellow + Restore-Cwd + Exit-WithCode 1 + } + $id = Get-Content "$($repo_root)\build\docker-image.id" + Write-Color -Text ">>> ", "Creating container from image id ", "[", $id, "]" -Color Green, Gray, White, Cyan, White + $cid = docker create $id bash + if ($LASTEXITCODE -ne 0) { + Write-Color -Text "!!! ", "Cannot create container." -Color Red, Yellow + Restore-Cwd + Exit-WithCode 1 + } + return $cid +} + +function Change-Cwd() { + Set-Location -Path $repo_root +} + +function New-DockerBuild { + $version_file = Get-Content -Path "$($repo_root)\openpype\version.py" + $result = [regex]::Matches($version_file, '__version__ = "(?\d+\.\d+.\d+.*)"') + $openpype_version = $result[0].Groups['version'].Value + $startTime = [int][double]::Parse((Get-Date -UFormat %s)) + Write-Color -Text ">>> ", "Building OpenPype using Docker ..." -Color Green, Gray, White + $variant = $args[0] + if ($variant.Length -eq 0) { + $dockerfile = "$($repo_root)\Dockerfile" + } else { + $dockerfile = "$( $repo_root )\Dockerfile.$variant" + } + if (-not (Test-Path -PathType Leaf -Path $dockerfile)) { + Write-Color -Text "!!! ", "Dockerfile for specifed platform ", "[", $variant, "]", "doesn't exist." -Color Red, Yellow, Cyan, White, Cyan, Yellow + Restore-Cwd + Exit-WithCode 1 + } + Write-Color -Text ">>> ", "Using Dockerfile for ", "[ ", $variant, " ]" -Color Green, Gray, White, Cyan, White + + $build_dir = "$($repo_root)\build" + if (-not(Test-Path $build_dir)) { + New-Item -ItemType Directory -Path $build_dir + } + Write-Color -Text "--- ", "Cleaning build directory ..." -Color Yellow, Gray + try { + Remove-Item -Recurse -Force "$($build_dir)\*" + } catch { + Write-Color -Text "!!! ", "Cannot clean build directory, possibly because process is using it." -Color Red, Gray + Write-Color -Text $_.Exception.Message -Color Red + Exit-WithCode 1 + } + + Write-Color -Text ">>> ", "Running Docker build ..." -Color Green, Gray, White + docker build --pull --iidfile $repo_root/build/docker-image.id --build-arg BUILD_DATE=$(Get-Date -UFormat %Y-%m-%dT%H:%M:%SZ) --build-arg VERSION=$openpype_version -t pypeclub/openpype:$openpype_version -f $dockerfile . + if ($LASTEXITCODE -ne 0) { + Write-Color -Text "!!! ", "Docker command failed.", $LASTEXITCODE -Color Red, Yellow, Red + Restore-Cwd + Exit-WithCode 1 + } + Write-Color -Text ">>> ", "Copying build from container ..." -Color Green, Gray, White + $cid = Get-Container + + docker cp "$($cid):/opt/openpype/build/exe.linux-x86_64-3.9" "$($repo_root)/build" + docker cp "$($cid):/opt/openpype/build/build.log" "$($repo_root)/build" + + $endTime = [int][double]::Parse((Get-Date -UFormat %s)) + try { + New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype build complete!", "All done in $( $endTime - $startTime ) secs. You will find OpenPype and build log in build directory." + } catch {} + Write-Color -Text "*** ", "All done in ", $($endTime - $startTime), " secs. You will find OpenPype and build log in ", "'.\build'", " directory." 
-Color Green, Gray, White, Gray, White, Gray +} + +Change-Cwd +New-DockerBuild $ARGS From 65bd128d510598b00b9b51f48a7eddc9ad519abc Mon Sep 17 00:00:00 2001 From: Mustafa Zarkash Date: Tue, 29 Aug 2023 19:03:58 +0300 Subject: [PATCH 15/21] Enhancement: Update houdini main menu (#5527) * update houdini main menu * add separator --- openpype/hosts/houdini/startup/MainMenuCommon.xml | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/startup/MainMenuCommon.xml b/openpype/hosts/houdini/startup/MainMenuCommon.xml index 47a4653d5d..5818a117eb 100644 --- a/openpype/hosts/houdini/startup/MainMenuCommon.xml +++ b/openpype/hosts/houdini/startup/MainMenuCommon.xml @@ -2,7 +2,19 @@ - + + + + + + From 948687e7a236a3767b07c78eba07a13663234be0 Mon Sep 17 00:00:00 2001 From: Ynbot Date: Wed, 30 Aug 2023 03:24:53 +0000 Subject: [PATCH 16/21] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 6d89e1eeae..12f797228b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.16.5-nightly.2" +__version__ = "3.16.5-nightly.3" From e426aca7213a32fcde9ca5b7e923444e29a54049 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Wed, 30 Aug 2023 03:25:28 +0000 Subject: [PATCH 17/21] chore(): update bug report / version --- .github/ISSUE_TEMPLATE/bug_report.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index d7e49de5cb..669bf391cd 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -35,6 +35,7 @@ body: label: Version description: What version are you running? Look to OpenPype Tray options: + - 3.16.5-nightly.3 - 3.16.5-nightly.2 - 3.16.5-nightly.1 - 3.16.4 @@ -134,7 +135,6 @@ body: - 3.14.8 - 3.14.8-nightly.4 - 3.14.8-nightly.3 - - 3.14.8-nightly.2 validations: required: true - type: dropdown From b83a40931385b0688cad416a7a998fb9b3f6f7c1 Mon Sep 17 00:00:00 2001 From: sjt-rvx <72554834+sjt-rvx@users.noreply.github.com> Date: Wed, 30 Aug 2023 13:12:03 +0000 Subject: [PATCH 18/21] have the addons loading respect a custom AYON_ADDONS_DIR (#5539) * have the addons loading respect a custom AYON_ADDONS_DIR * Update openpype/modules/base.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --------- Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/modules/base.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 9b3637c48a..84e213288c 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -373,10 +373,12 @@ def _load_ayon_addons(openpype_modules, modules_key, log): addons_info = _get_ayon_addons_information() if not addons_info: return v3_addons_to_skip - addons_dir = os.path.join( - appdirs.user_data_dir("AYON", "Ynput"), - "addons" - ) + addons_dir = os.environ.get("AYON_ADDONS_DIR") + if not addons_dir: + addons_dir = os.path.join( + appdirs.user_data_dir("AYON", "Ynput"), + "addons" + ) if not os.path.exists(addons_dir): log.warning("Addons directory does not exists. 
Path \"{}\"".format( addons_dir From 3c3438532018c6360b77a2b5c86959638e73d86f Mon Sep 17 00:00:00 2001 From: Mustafa Zarkash Date: Wed, 30 Aug 2023 16:35:09 +0300 Subject: [PATCH 19/21] Houdini: Improve VDB Selection (#5523) * improve sop selection * resolve hound conversations * resolve BigRoy's comments --- .../plugins/create/create_vbd_cache.py | 62 ++++++++++++++++++- 1 file changed, 61 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py index c015cebd49..9c96e48e3a 100644 --- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py @@ -33,7 +33,7 @@ class CreateVDBCache(plugin.HoudiniCreator): } if self.selected_nodes: - parms["soppath"] = self.selected_nodes[0].path() + parms["soppath"] = self.get_sop_node_path(self.selected_nodes[0]) instance_node.setParms(parms) @@ -42,3 +42,63 @@ class CreateVDBCache(plugin.HoudiniCreator): hou.ropNodeTypeCategory(), hou.sopNodeTypeCategory() ] + + def get_sop_node_path(self, selected_node): + """Get Sop Path of the selected node. + + Although Houdini allows ObjNode path on `sop_path` for the + the ROP node, we prefer it set to the SopNode path explicitly. + """ + + # Allow sop level paths (e.g. /obj/geo1/box1) + if isinstance(selected_node, hou.SopNode): + self.log.debug( + "Valid SopNode selection, 'SOP Path' in ROP will" + " be set to '%s'.", selected_node.path() + ) + return selected_node.path() + + # Allow object level paths to Geometry nodes (e.g. /obj/geo1) + # but do not allow other object level nodes types like cameras, etc. + elif isinstance(selected_node, hou.ObjNode) and \ + selected_node.type().name() == "geo": + + # Try to find output node. + sop_node = self.get_obj_output(selected_node) + if sop_node: + self.log.debug( + "Valid ObjNode selection, 'SOP Path' in ROP will " + "be set to the child path '%s'.", sop_node.path() + ) + return sop_node.path() + + self.log.debug( + "Selection isn't valid. 'SOP Path' in ROP will be empty." + ) + return "" + + def get_obj_output(self, obj_node): + """Try to find output node. 
+ + If any output nodes are present, return the output node with + the minimum 'outputidx' + If no output nodes are present, return the node with display flag + If no nodes are present at all, return None + """ + + outputs = obj_node.subnetOutputs() + + # if obj_node is empty + if not outputs: + return + + # if obj_node has one output child whether its + # sop output node or a node with the render flag + elif len(outputs) == 1: + return outputs[0] + + # if there are more than one, then it has multiple output nodes + # return the one with the minimum 'outputidx' + else: + return min(outputs, + key=lambda node: node.evalParm('outputidx')) From a60c3d7ce304f648e13c6ab712a9a558b2186a65 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 30 Aug 2023 16:36:42 +0200 Subject: [PATCH 20/21] use correct git url in README (#5542) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 92f1cb62dc..ce98f845e6 100644 --- a/README.md +++ b/README.md @@ -62,7 +62,7 @@ development tools like [CMake](https://cmake.org/) and [Visual Studio](https://v #### Clone repository: ```sh -git clone --recurse-submodules git@github.com:Pypeclub/OpenPype.git +git clone --recurse-submodules git@github.com:ynput/OpenPype.git ``` #### To build OpenPype: From 74d612208ec4dc0e3a31891ce93dfca5a02d4d48 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 30 Aug 2023 17:47:03 +0200 Subject: [PATCH 21/21] AYON: Deadline expand userpaths in executables list (#5540) * expand userpaths set in executables list * Update logic for searching executable with expanduser --------- Co-authored-by: Petr Kalis --- .../deadline/repository/custom/plugins/Ayon/Ayon.py | 8 +++++++- .../repository/custom/plugins/GlobalJobPreLoad.py | 9 ++++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/Ayon/Ayon.py b/openpype/modules/deadline/repository/custom/plugins/Ayon/Ayon.py index 1544acc2a4..a29acf9823 100644 --- a/openpype/modules/deadline/repository/custom/plugins/Ayon/Ayon.py +++ b/openpype/modules/deadline/repository/custom/plugins/Ayon/Ayon.py @@ -91,7 +91,13 @@ class AyonDeadlinePlugin(DeadlinePlugin): # clean '\ ' for MacOS pasting if platform.system().lower() == "darwin": exe_list = exe_list.replace("\\ ", " ") - exe = FileUtils.SearchFileList(exe_list) + + expanded_paths = [] + for path in exe_list.split(";"): + if path.startswith("~"): + path = os.path.expanduser(path) + expanded_paths.append(path) + exe = FileUtils.SearchFileList(";".join(expanded_paths)) if exe == "": self.FailRender( diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 5f7e1f1032..97875215ae 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -547,7 +547,14 @@ def get_ayon_executable(): # clean '\ ' for MacOS pasting if platform.system().lower() == "darwin": exe_list = exe_list.replace("\\ ", " ") - return exe_list + + # Expand user paths + expanded_paths = [] + for path in exe_list.split(";"): + if path.startswith("~"): + path = os.path.expanduser(path) + expanded_paths.append(path) + return ";".join(expanded_paths) def inject_render_job_id(deadlinePlugin):
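The Docker build script above reads the OpenPype version out of `openpype/version.py` (the file the automated bump in [PATCH 16/21] touches) with a named capture group. The snippet below only mirrors the shape of that pattern for a quick sanity check in Python; it is not part of the repository, and note that Python's `re` spells named groups `(?P<name>...)` while the .NET regex in the PowerShell script uses `(?<name>...)`.

```python
import re

# Python equivalent of the build script's version lookup; named groups use
# the (?P<name>...) spelling here instead of .NET's (?<name>...).
VERSION_RE = re.compile(r'__version__ = "(?P<version>\d+\.\d+\.\d+.*)"')

sample = '__version__ = "3.16.5-nightly.3"'  # value from openpype/version.py above
match = VERSION_RE.search(sample)
if match:
    print(match.group("version"))  # -> 3.16.5-nightly.3
```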
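The addon-loading change in [PATCH 18/21] makes `openpype/modules/base.py` honour a custom `AYON_ADDONS_DIR` before falling back to the default user data location. The helper below reproduces just that lookup so it can be checked outside OpenPype; this is a minimal sketch, the function name `resolve_addons_dir` is illustrative only, and it assumes the `appdirs` package is installed, as the module itself requires.

```python
import os

import appdirs


def resolve_addons_dir():
    """Return the addons directory the loader would use.

    Mirrors the lookup in openpype/modules/base.py: an explicit
    AYON_ADDONS_DIR environment variable wins, otherwise the default
    AYON user data location is used.
    """
    addons_dir = os.environ.get("AYON_ADDONS_DIR")
    if not addons_dir:
        addons_dir = os.path.join(
            appdirs.user_data_dir("AYON", "Ynput"),
            "addons"
        )
    return addons_dir


if __name__ == "__main__":
    # With AYON_ADDONS_DIR unset this prints the platform default,
    # e.g. ~/.local/share/AYON/addons on Linux.
    print(resolve_addons_dir())
```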
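Finally, [PATCH 21/21] adds the same user-path expansion to both `Ayon.py` and `GlobalJobPreLoad.py` so that `~`-prefixed entries in the executables list resolve before Deadline searches them. The standalone sketch below exercises that behaviour; the helper name `expand_userpaths` and the sample path list are illustrative and do not appear in either plugin.

```python
import os
import platform


def expand_userpaths(exe_list):
    """Expand a leading '~' in each entry of a ';'-separated executable list.

    Mirrors the loop added to Ayon.py and GlobalJobPreLoad.py; entries
    without a leading '~' pass through unchanged.
    """
    # Clean '\ ' for MacOS pasting, as both plugins do before splitting.
    if platform.system().lower() == "darwin":
        exe_list = exe_list.replace("\\ ", " ")

    expanded_paths = []
    for path in exe_list.split(";"):
        if path.startswith("~"):
            path = os.path.expanduser(path)
        expanded_paths.append(path)
    return ";".join(expanded_paths)


if __name__ == "__main__":
    # Only the first entry is expanded; the absolute path is left as-is.
    sample = "~/ayon/ayon_console;/usr/local/bin/ayon_console"
    print(expand_userpaths(sample))
```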