diff --git a/ayon_start.py b/ayon_start.py
index 11677b4415..1e791f4f4f 100644
--- a/ayon_start.py
+++ b/ayon_start.py
@@ -96,6 +96,19 @@ else:
     sys.path.append(_dependencies_path)
     _python_paths.append(_dependencies_path)
 
+# -------------------------------------------------
+# Temporary solution to add ayon_api to python path
+# -------------------------------------------------
+# This is to avoid need of new build & release when ayon-python-api is updated.
+ayon_dependency_dir = os.path.join(
+    AYON_ROOT, "openpype", "vendor", "python", "ayon"
+)
+if ayon_dependency_dir in _python_paths:
+    _python_paths.remove(ayon_dependency_dir)
+_python_paths.insert(0, ayon_dependency_dir)
+sys.path.insert(0, ayon_dependency_dir)
+# -------------------------------------------------
+
 # Vendored python modules that must not be in PYTHONPATH environment but
 # are required for OpenPype processes
 sys.path.insert(0, os.path.join(AYON_ROOT, "vendor", "python"))
diff --git a/openpype/vendor/python/ayon/ayon_api/__init__.py b/openpype/vendor/python/ayon/ayon_api/__init__.py
new file mode 100644
index 0000000000..700c1b3687
--- /dev/null
+++ b/openpype/vendor/python/ayon/ayon_api/__init__.py
@@ -0,0 +1,256 @@
+from .utils import (
+    TransferProgress,
+    slugify_string,
+)
+from .server_api import (
+    ServerAPI,
+)
+
+from ._api import (
+    GlobalServerAPI,
+    ServiceContext,
+
+    init_service,
+    get_service_name,
+    get_service_addon_name,
+    get_service_addon_version,
+    get_service_addon_settings,
+
+    is_connection_created,
+    create_connection,
+    close_connection,
+    change_token,
+    set_environments,
+    get_server_api_connection,
+    get_site_id,
+    set_site_id,
+    get_client_version,
+    set_client_version,
+    get_default_settings_variant,
+    set_default_settings_variant,
+
+    get_base_url,
+    get_rest_url,
+
+    raw_get,
+    raw_post,
+    raw_put,
+    raw_patch,
+    raw_delete,
+
+    get,
+    post,
+    put,
+    patch,
+    delete,
+
+    get_event,
+    get_events,
+    dispatch_event,
+    update_event,
+    enroll_event_job,
+
+
download_file, + upload_file, + + query_graphql, + + get_addons_info, + download_addon_private_file, + + get_dependencies_info, + update_dependency_info, + + download_dependency_package, + upload_dependency_package, + delete_dependency_package, + + get_user, + get_users, + + get_attributes_for_type, + get_default_fields_for_type, + + get_project_anatomy_preset, + get_project_anatomy_presets, + get_project_roots_by_site, + get_project_roots_for_site, + + get_addon_site_settings_schema, + get_addon_settings_schema, + + get_addon_studio_settings, + get_addon_project_settings, + get_addon_settings, + get_addons_studio_settings, + get_addons_project_settings, + get_addons_settings, + + get_projects, + get_project, + create_project, + delete_project, + + get_folder_by_id, + get_folder_by_name, + get_folder_by_path, + get_folders, + + get_tasks, + + get_folder_ids_with_subsets, + get_subset_by_id, + get_subset_by_name, + get_subsets, + get_subset_families, + + get_version_by_id, + get_version_by_name, + version_is_latest, + get_versions, + get_hero_version_by_subset_id, + get_hero_version_by_id, + get_hero_versions, + get_last_versions, + get_last_version_by_subset_id, + get_last_version_by_subset_name, + get_representation_by_id, + get_representation_by_name, + get_representations, + get_representations_parents, + get_representation_parents, + get_repre_ids_by_context_filters, + + create_thumbnail, + get_thumbnail, + get_folder_thumbnail, + get_version_thumbnail, + get_workfile_thumbnail, +) + + +__all__ = ( + "TransferProgress", + "slugify_string", + + "ServerAPI", + + "GlobalServerAPI", + "ServiceContext", + + "init_service", + "get_service_name", + "get_service_addon_name", + "get_service_addon_version", + "get_service_addon_settings", + + "is_connection_created", + "create_connection", + "close_connection", + "change_token", + "set_environments", + "get_server_api_connection", + "get_site_id", + "set_site_id", + "get_client_version", + "set_client_version", + 
"get_default_settings_variant", + "set_default_settings_variant", + + "get_base_url", + "get_rest_url", + + "raw_get", + "raw_post", + "raw_put", + "raw_patch", + "raw_delete", + + "get", + "post", + "put", + "patch", + "delete", + + "get_event", + "get_events", + "dispatch_event", + "update_event", + "enroll_event_job", + + "download_file", + "upload_file", + + "query_graphql", + + "get_addons_info", + "download_addon_private_file", + + "get_dependencies_info", + "update_dependency_info", + + "download_dependency_package", + "upload_dependency_package", + "delete_dependency_package", + + "get_user", + "get_users", + + "get_attributes_for_type", + "get_default_fields_for_type", + + "get_project_anatomy_preset", + "get_project_anatomy_presets", + "get_project_roots_by_site", + "get_project_roots_for_site", + + "get_addon_site_settings_schema", + "get_addon_settings_schema", + "get_addon_studio_settings", + "get_addon_project_settings", + "get_addon_settings", + "get_addons_studio_settings", + "get_addons_project_settings", + "get_addons_settings", + + "get_projects", + "get_project", + "create_project", + "delete_project", + + "get_folder_by_id", + "get_folder_by_name", + "get_folder_by_path", + "get_folders", + + "get_tasks", + + "get_folder_ids_with_subsets", + "get_subset_by_id", + "get_subset_by_name", + "get_subsets", + "get_subset_families", + + "get_version_by_id", + "get_version_by_name", + "version_is_latest", + "get_versions", + "get_hero_version_by_subset_id", + "get_hero_version_by_id", + "get_hero_versions", + "get_last_versions", + "get_last_version_by_subset_id", + "get_last_version_by_subset_name", + "get_representation_by_id", + "get_representation_by_name", + "get_representations", + "get_representations_parents", + "get_representation_parents", + "get_repre_ids_by_context_filters", + + "create_thumbnail", + "get_thumbnail", + "get_folder_thumbnail", + "get_version_thumbnail", + "get_workfile_thumbnail", +) diff --git 
a/openpype/vendor/python/ayon/ayon_api/_api.py b/openpype/vendor/python/ayon/ayon_api/_api.py new file mode 100644 index 0000000000..6410b459eb --- /dev/null +++ b/openpype/vendor/python/ayon/ayon_api/_api.py @@ -0,0 +1,811 @@ +"""Singleton based server api for direct access. + +This implementation will be probably the most used part of package. Gives +option to have singleton connection to Server URL based on environment variable +values. All public functions and classes are imported in '__init__.py' so +they're available directly in top module import. +""" + +import os +import socket + +from .constants import ( + SERVER_URL_ENV_KEY, + SERVER_TOKEN_ENV_KEY, +) +from .server_api import ServerAPI +from .exceptions import FailedServiceInit + + +class GlobalServerAPI(ServerAPI): + """Extended server api which also handles storing tokens and url. + + Created object expect to have set environment variables + 'AYON_SERVER_URL'. Also is expecting filled 'AYON_TOKEN' + but that can be filled afterwards with calling 'login' method. + """ + + def __init__(self, site_id=None, client_version=None): + url = self.get_url() + token = self.get_token() + + super(GlobalServerAPI, self).__init__(url, token, site_id, client_version) + + self.validate_server_availability() + self.create_session() + + def login(self, username, password): + """Login to the server or change user. + + If user is the same as current user and token is available the + login is skipped. + """ + + previous_token = self._access_token + super(GlobalServerAPI, self).login(username, password) + if self.has_valid_token and previous_token != self._access_token: + os.environ[SERVER_TOKEN_ENV_KEY] = self._access_token + + @staticmethod + def get_url(): + return os.environ.get(SERVER_URL_ENV_KEY) + + @staticmethod + def get_token(): + return os.environ.get(SERVER_TOKEN_ENV_KEY) + + @staticmethod + def set_environments(url, token): + """Change url and token environemnts in currently running process. 
+ + Args: + url (str): New server url. + token (str): User's token. + """ + + os.environ[SERVER_URL_ENV_KEY] = url or "" + os.environ[SERVER_TOKEN_ENV_KEY] = token or "" + + +class GlobalContext: + """Singleton connection holder. + + Goal is to avoid create connection on import which can be dangerous in + some cases. + """ + + _connection = None + + @classmethod + def is_connection_created(cls): + return cls._connection is not None + + @classmethod + def change_token(cls, url, token): + GlobalServerAPI.set_environments(url, token) + if cls._connection is None: + return + + if cls._connection.get_base_url() == url: + cls._connection.set_token(token) + else: + cls.close_connection() + + @classmethod + def close_connection(cls): + if cls._connection is not None: + cls._connection.close_session() + cls._connection = None + + @classmethod + def create_connection(cls, *args, **kwargs): + if cls._connection is not None: + cls.close_connection() + cls._connection = GlobalServerAPI(*args, **kwargs) + return cls._connection + + @classmethod + def get_server_api_connection(cls): + if cls._connection is None: + cls.create_connection() + return cls._connection + + +class ServiceContext: + """Helper for services running under server. + + When service is running from server the process receives information about + connection from environment variables. This class helps to initialize the + values without knowing environment variables (that may change over time). + + All what must be done is to call 'init_service' function/method. The + arguments are for cases when the service is running in specific environment + and their values are e.g. loaded from private file or for testing purposes. 
+ """ + + token = None + server_url = None + addon_name = None + addon_version = None + service_name = None + + @staticmethod + def get_value_from_envs(env_keys, value=None): + if value: + return value + + for env_key in env_keys: + value = os.environ.get(env_key) + if value: + break + return value + + @classmethod + def init_service( + cls, + token=None, + server_url=None, + addon_name=None, + addon_version=None, + service_name=None, + connect=True + ): + token = cls.get_value_from_envs( + ("AY_API_KEY", "AYON_TOKEN"), + token + ) + server_url = cls.get_value_from_envs( + ("AY_SERVER_URL", "AYON_SERVER_URL"), + server_url + ) + if not server_url: + raise FailedServiceInit("URL to server is not set") + + if not token: + raise FailedServiceInit( + "Token to server {} is not set".format(server_url) + ) + + addon_name = cls.get_value_from_envs( + ("AY_ADDON_NAME", "AYON_ADDON_NAME"), + addon_name + ) + addon_version = cls.get_value_from_envs( + ("AY_ADDON_VERSION", "AYON_ADDON_VERSION"), + addon_version + ) + service_name = cls.get_value_from_envs( + ("AY_SERVICE_NAME", "AYON_SERVICE_NAME"), + service_name + ) + + cls.token = token + cls.server_url = server_url + cls.addon_name = addon_name + cls.addon_version = addon_version + cls.service_name = service_name or socket.gethostname() + + # Make sure required environments for GlobalServerAPI are set + GlobalServerAPI.set_environments(cls.server_url, cls.token) + + if connect: + print("Connecting to server \"{}\"".format(server_url)) + con = GlobalContext.get_server_api_connection() + user = con.get_user() + print("Logged in as user \"{}\"".format(user["name"])) + + +def init_service(*args, **kwargs): + """Initialize current connection from service. + + The service expect specific environment variables. The variables must all + be set to make the connection work as a service. + """ + + ServiceContext.init_service(*args, **kwargs) + + +def get_service_addon_name(): + """Name of addon which initialized service connection. 
+ + Service context must be initialized to be able to use this function. Call + 'init_service' on you service start to do so. + + Returns: + Union[str, None]: Name of addon or None. + """ + + return ServiceContext.addon_name + + +def get_service_addon_version(): + """Version of addon which initialized service connection. + + Service context must be initialized to be able to use this function. Call + 'init_service' on you service start to do so. + + Returns: + Union[str, None]: Version of addon or None. + """ + + return ServiceContext.addon_version + + +def get_service_name(): + """Name of service. + + Service context must be initialized to be able to use this function. Call + 'init_service' on you service start to do so. + + Returns: + Union[str, None]: Name of service if service was registered. + """ + + return ServiceContext.service_name + + +def get_service_addon_settings(): + """Addon settings of service which initialized service. + + Service context must be initialized to be able to use this function. Call + 'init_service' on you service start to do so. + + Returns: + Dict[str, Any]: Addon settings. + + Raises: + ValueError: When service was not initialized. + """ + + addon_name = get_service_addon_name() + addon_version = get_service_addon_version() + if addon_name is None or addon_version is None: + raise ValueError("Service is not initialized") + return get_addon_settings(addon_name, addon_version) + + +def is_connection_created(): + """Is global connection created. + + Returns: + bool: True if connection was connected. + """ + + return GlobalContext.is_connection_created() + + +def create_connection(site_id=None, client_version=None): + """Create global connection. + + Args: + site_id (str): Machine site id/name. + client_version (str): Desktop app version. + + Returns: + GlobalServerAPI: Created connection. 
+ """ + + return GlobalContext.create_connection(site_id, client_version) + + +def close_connection(): + """Close global connection if is connected.""" + + GlobalContext.close_connection() + + +def change_token(url, token): + """Change connection token for url. + + This function can be also used to change url. + + Args: + url (str): Server url. + token (str): API key token. + """ + + GlobalContext.change_token(url, token) + + +def set_environments(url, token): + """Set global environments for global connection. + + Args: + url (Union[str, None]): Url to server or None to unset environments. + token (Union[str, None]): API key token to be used for connection. + """ + + GlobalServerAPI.set_environments(url, token) + + +def get_server_api_connection(): + """Access to global scope object of GlobalServerAPI. + + This access expect to have set environment variables 'AYON_SERVER_URL' + and 'AYON_TOKEN'. + + Returns: + GlobalServerAPI: Object of connection to server. + """ + + return GlobalContext.get_server_api_connection() + + +def get_site_id(): + con = get_server_api_connection() + return con.get_site_id() + + +def set_site_id(site_id): + """Set site id of already connected client connection. + + Site id is human-readable machine id used in AYON desktop application. + + Args: + site_id (Union[str, None]): Site id used in connection. + """ + + con = get_server_api_connection() + con.set_site_id(site_id) + + +def get_client_version(): + """Version of client used to connect to server. + + Client version is AYON client build desktop application. + + Returns: + str: Client version string used in connection. + """ + + con = get_server_api_connection() + return con.get_client_version() + + +def set_client_version(client_version): + """Set version of already connected client connection. + + Client version is version of AYON desktop application. + + Args: + client_version (Union[str, None]): Client version string. 
+ """ + + con = get_server_api_connection() + con.set_client_version(client_version) + + +def get_default_settings_variant(): + """Default variant used for settings. + + Returns: + Union[str, None]: name of variant or None. + """ + + con = get_server_api_connection() + return con.get_client_version() + + +def set_default_settings_variant(variant): + """Change default variant for addon settings. + + Note: + It is recommended to set only 'production' or 'staging' variants + as default variant. + + Args: + variant (Union[str, None]): Settings variant name. + """ + + con = get_server_api_connection() + return con.set_default_settings_variant(variant) + + +def get_base_url(): + con = get_server_api_connection() + return con.get_base_url() + + +def get_rest_url(): + con = get_server_api_connection() + return con.get_rest_url() + + +def raw_get(*args, **kwargs): + con = get_server_api_connection() + return con.raw_get(*args, **kwargs) + + +def raw_post(*args, **kwargs): + con = get_server_api_connection() + return con.raw_post(*args, **kwargs) + + +def raw_put(*args, **kwargs): + con = get_server_api_connection() + return con.raw_put(*args, **kwargs) + + +def raw_patch(*args, **kwargs): + con = get_server_api_connection() + return con.raw_patch(*args, **kwargs) + + +def raw_delete(*args, **kwargs): + con = get_server_api_connection() + return con.raw_delete(*args, **kwargs) + + +def get(*args, **kwargs): + con = get_server_api_connection() + return con.get(*args, **kwargs) + + +def post(*args, **kwargs): + con = get_server_api_connection() + return con.post(*args, **kwargs) + + +def put(*args, **kwargs): + con = get_server_api_connection() + return con.put(*args, **kwargs) + + +def patch(*args, **kwargs): + con = get_server_api_connection() + return con.patch(*args, **kwargs) + + +def delete(*args, **kwargs): + con = get_server_api_connection() + return con.delete(*args, **kwargs) + + +def get_event(*args, **kwargs): + con = get_server_api_connection() + return 
con.get_event(*args, **kwargs) + + +def get_events(*args, **kwargs): + con = get_server_api_connection() + return con.get_events(*args, **kwargs) + + +def dispatch_event(*args, **kwargs): + con = get_server_api_connection() + return con.dispatch_event(*args, **kwargs) + + +def update_event(*args, **kwargs): + con = get_server_api_connection() + return con.update_event(*args, **kwargs) + + +def enroll_event_job(*args, **kwargs): + con = get_server_api_connection() + return con.enroll_event_job(*args, **kwargs) + + +def download_file(*args, **kwargs): + con = get_server_api_connection() + return con.download_file(*args, **kwargs) + + +def upload_file(*args, **kwargs): + con = get_server_api_connection() + return con.upload_file(*args, **kwargs) + + +def query_graphql(*args, **kwargs): + con = get_server_api_connection() + return con.query_graphql(*args, **kwargs) + + +def get_users(*args, **kwargs): + con = get_server_api_connection() + return con.get_users(*args, **kwargs) + + +def get_user(*args, **kwargs): + con = get_server_api_connection() + return con.get_user(*args, **kwargs) + + +def get_attributes_for_type(*args, **kwargs): + con = get_server_api_connection() + return con.get_attributes_for_type(*args, **kwargs) + + +def get_addons_info(*args, **kwargs): + con = get_server_api_connection() + return con.get_addons_info(*args, **kwargs) + + +def download_addon_private_file(*args, **kwargs): + con = get_server_api_connection() + return con.download_addon_private_file(*args, **kwargs) + + +def get_dependencies_info(*args, **kwargs): + con = get_server_api_connection() + return con.get_dependencies_info(*args, **kwargs) + + +def update_dependency_info(*args, **kwargs): + con = get_server_api_connection() + return con.update_dependency_info(*args, **kwargs) + + +def download_dependency_package(*args, **kwargs): + con = get_server_api_connection() + return con.download_dependency_package(*args, **kwargs) + + +def upload_dependency_package(*args, **kwargs): + con = 
get_server_api_connection() + return con.upload_dependency_package(*args, **kwargs) + + +def delete_dependency_package(*args, **kwargs): + con = get_server_api_connection() + return con.delete_dependency_package(*args, **kwargs) + + +def get_project_anatomy_presets(*args, **kwargs): + con = get_server_api_connection() + return con.get_project_anatomy_presets(*args, **kwargs) + + +def get_project_anatomy_preset(*args, **kwargs): + con = get_server_api_connection() + return con.get_project_anatomy_preset(*args, **kwargs) + + +def get_project_roots_by_site(*args, **kwargs): + con = get_server_api_connection() + return con.get_project_roots_by_site(*args, **kwargs) + + +def get_project_roots_for_site(*args, **kwargs): + con = get_server_api_connection() + return con.get_project_roots_for_site(*args, **kwargs) + + +def get_addon_settings_schema(*args, **kwargs): + con = get_server_api_connection() + return con.get_addon_settings_schema(*args, **kwargs) + + +def get_addon_site_settings_schema(*args, **kwargs): + con = get_server_api_connection() + return con.get_addon_site_settings_schema(*args, **kwargs) + + +def get_addon_studio_settings(*args, **kwargs): + con = get_server_api_connection() + return con.get_addon_studio_settings(*args, **kwargs) + + +def get_addon_project_settings(*args, **kwargs): + con = get_server_api_connection() + return con.get_addon_project_settings(*args, **kwargs) + + +def get_addon_settings(*args, **kwargs): + con = get_server_api_connection() + return con.get_addon_settings(*args, **kwargs) + + +def get_addon_site_settings(*args, **kwargs): + con = get_server_api_connection() + return con.get_addon_site_settings(*args, **kwargs) + + +def get_addons_studio_settings(*args, **kwargs): + con = get_server_api_connection() + return con.get_addons_studio_settings(*args, **kwargs) + + +def get_addons_project_settings(*args, **kwargs): + con = get_server_api_connection() + return con.get_addons_project_settings(*args, **kwargs) + + +def 
get_addons_settings(*args, **kwargs): + con = get_server_api_connection() + return con.get_addons_settings(*args, **kwargs) + + +def get_project(*args, **kwargs): + con = get_server_api_connection() + return con.get_project(*args, **kwargs) + + +def get_projects(*args, **kwargs): + con = get_server_api_connection() + return con.get_projects(*args, **kwargs) + + +def get_folders(*args, **kwargs): + con = get_server_api_connection() + return con.get_folders(*args, **kwargs) + + +def get_tasks(*args, **kwargs): + con = get_server_api_connection() + return con.get_tasks(*args, **kwargs) + + +def get_folder_by_id(*args, **kwargs): + con = get_server_api_connection() + return con.get_folder_by_id(*args, **kwargs) + + +def get_folder_by_path(*args, **kwargs): + con = get_server_api_connection() + return con.get_folder_by_path(*args, **kwargs) + + +def get_folder_by_name(*args, **kwargs): + con = get_server_api_connection() + return con.get_folder_by_name(*args, **kwargs) + + +def get_folder_ids_with_subsets(*args, **kwargs): + con = get_server_api_connection() + return con.get_folder_ids_with_subsets(*args, **kwargs) + + +def get_subsets(*args, **kwargs): + con = get_server_api_connection() + return con.get_subsets(*args, **kwargs) + + +def get_subset_by_id(*args, **kwargs): + con = get_server_api_connection() + return con.get_subset_by_id(*args, **kwargs) + + +def get_subset_by_name(*args, **kwargs): + con = get_server_api_connection() + return con.get_subset_by_name(*args, **kwargs) + + +def get_subset_families(*args, **kwargs): + con = get_server_api_connection() + return con.get_subset_families(*args, **kwargs) + + +def get_versions(*args, **kwargs): + con = get_server_api_connection() + return con.get_versions(*args, **kwargs) + + +def get_version_by_id(*args, **kwargs): + con = get_server_api_connection() + return con.get_version_by_id(*args, **kwargs) + + +def get_version_by_name(*args, **kwargs): + con = get_server_api_connection() + return 
con.get_version_by_name(*args, **kwargs) + + +def get_hero_version_by_id(*args, **kwargs): + con = get_server_api_connection() + return con.get_hero_version_by_id(*args, **kwargs) + + +def get_hero_version_by_subset_id(*args, **kwargs): + con = get_server_api_connection() + return con.get_hero_version_by_subset_id(*args, **kwargs) + + +def get_hero_versions(*args, **kwargs): + con = get_server_api_connection() + return con.get_hero_versions(*args, **kwargs) + + +def get_last_versions(*args, **kwargs): + con = get_server_api_connection() + return con.get_last_versions(*args, **kwargs) + + +def get_last_version_by_subset_id(*args, **kwargs): + con = get_server_api_connection() + return con.get_last_version_by_subset_id(*args, **kwargs) + + +def get_last_version_by_subset_name(*args, **kwargs): + con = get_server_api_connection() + return con.get_last_version_by_subset_name(*args, **kwargs) + + +def version_is_latest(*args, **kwargs): + con = get_server_api_connection() + return con.version_is_latest(*args, **kwargs) + + +def get_representations(*args, **kwargs): + con = get_server_api_connection() + return con.get_representations(*args, **kwargs) + + +def get_representation_by_id(*args, **kwargs): + con = get_server_api_connection() + return con.get_representation_by_id(*args, **kwargs) + + +def get_representation_by_name(*args, **kwargs): + con = get_server_api_connection() + return con.get_representation_by_name(*args, **kwargs) + + +def get_representation_parents(*args, **kwargs): + con = get_server_api_connection() + return con.get_representation_parents(*args, **kwargs) + + +def get_representations_parents(*args, **kwargs): + con = get_server_api_connection() + return con.get_representations_parents(*args, **kwargs) + + +def get_repre_ids_by_context_filters(*args, **kwargs): + con = get_server_api_connection() + return con.get_repre_ids_by_context_filters(*args, **kwargs) + + +def create_project( + project_name, + project_code, + library_project=False, + 
preset_name=None
+):
+    con = get_server_api_connection()
+    return con.create_project(
+        project_name,
+        project_code,
+        library_project,
+        preset_name
+    )
+
+
+def delete_project(project_name):
+    con = get_server_api_connection()
+    return con.delete_project(project_name)
+
+
+def create_thumbnail(project_name, src_filepath):
+    con = get_server_api_connection()
+    return con.create_thumbnail(project_name, src_filepath)
+
+
+def get_thumbnail(project_name, entity_type, entity_id, thumbnail_id=None):
+    con = get_server_api_connection()
+    return con.get_thumbnail(project_name, entity_type, entity_id, thumbnail_id)
+
+
+def get_folder_thumbnail(project_name, folder_id, thumbnail_id=None):
+    con = get_server_api_connection()
+    return con.get_folder_thumbnail(project_name, folder_id, thumbnail_id)
+
+
+def get_version_thumbnail(project_name, version_id, thumbnail_id=None):
+    con = get_server_api_connection()
+    return con.get_version_thumbnail(project_name, version_id, thumbnail_id)
+
+
+def get_workfile_thumbnail(project_name, workfile_id, thumbnail_id=None):
+    con = get_server_api_connection()
+    return con.get_workfile_thumbnail(project_name, workfile_id, thumbnail_id)
+
+
+def create_thumbnail(project_name, src_filepath):
+    con = get_server_api_connection()
+    return con.create_thumbnail(project_name, src_filepath)
+
+
+def get_default_fields_for_type(entity_type):
+    con = get_server_api_connection()
+    return con.get_default_fields_for_type(entity_type)
diff --git a/openpype/vendor/python/ayon/ayon_api/constants.py b/openpype/vendor/python/ayon/ayon_api/constants.py
new file mode 100644
index 0000000000..e431af6f9d
--- /dev/null
+++ b/openpype/vendor/python/ayon/ayon_api/constants.py
@@ -0,0 +1,105 @@
+SERVER_URL_ENV_KEY = "AYON_SERVER_URL"
+SERVER_TOKEN_ENV_KEY = "AYON_TOKEN"
+
+# --- Project ---
+DEFAULT_PROJECT_FIELDS = {
+    "active",
+    "name",
+    "code",
+    "config",
+    "createdAt",
+}
+
+# --- Folders ---
+DEFAULT_FOLDER_FIELDS = {
+    "id",
+    "name",
+    "label",
+    "folderType",
+
"path", + "parentId", + "active", + "thumbnailId", +} + +# --- Tasks --- +DEFAULT_TASK_FIELDS = { + "id", + "name", + "label", + "taskType", + "folderId", + "active", + "assignees", +} + +# --- Subsets --- +DEFAULT_SUBSET_FIELDS = { + "id", + "name", + "folderId", + "active", + "family", +} + +# --- Versions --- +DEFAULT_VERSION_FIELDS = { + "id", + "name", + "version", + "subsetId", + "taskId", + "active", + "author", + "thumbnailId", + "createdAt", + "updatedAt", +} + +# --- Representations --- +DEFAULT_REPRESENTATION_FIELDS = { + "id", + "name", + "context", + "createdAt", + "active", + "versionId", +} + +REPRESENTATION_FILES_FIELDS = { + "files.name", + "files.hash", + "files.id", + "files.path", + "files.size", +} + +# --- Workfile info --- +DEFAULT_WORKFILE_INFO_FIELDS = { + "active", + "createdAt", + "createdBy", + "id", + "name", + "path", + "projectName", + "taskId", + "thumbnailId", + "updatedAt", + "updatedBy", +} + +DEFAULT_EVENT_FIELDS = { + "id", + "hash", + "createdAt", + "dependsOn", + "description", + "project", + "retries", + "sender", + "status", + "topic", + "updatedAt", + "user", +} \ No newline at end of file diff --git a/openpype/vendor/python/ayon/ayon_api/entity_hub.py b/openpype/vendor/python/ayon/ayon_api/entity_hub.py new file mode 100644 index 0000000000..76703d2e15 --- /dev/null +++ b/openpype/vendor/python/ayon/ayon_api/entity_hub.py @@ -0,0 +1,1683 @@ +import copy +import collections +from abc import ABCMeta, abstractmethod, abstractproperty + +import six +from ._api import get_server_api_connection +from .utils import create_entity_id, convert_entity_id + +UNKNOWN_VALUE = object() +PROJECT_PARENT_ID = object() +_NOT_SET = object() + + +class EntityHub(object): + """Helper to create, update or remove entities in project. + + The hub is a guide to operation with folder entities and update of project. + Project entity must already exist on server (can be only updated). + + Object is caching entities queried from server. 
They won't be required once + they were queried, so it is recommended to create new hub or clear cache + frequently. + + Todos: + Listen to server events about entity changes to be able update already + queried entities. + + Args: + project_name (str): Name of project where changes will happen. + connection (ServerAPI): Connection to server with logged user. + allow_data_changes (bool): This option gives ability to change 'data' + key on entities. This is not recommended as 'data' may be use for + secure information and would also slow down server queries. Content + of 'data' key can't be received only GraphQl. + """ + + def __init__( + self, project_name, connection=None, allow_data_changes=False + ): + if not connection: + connection = get_server_api_connection() + self._connection = connection + + self._project_name = project_name + self._entities_by_id = {} + self._entities_by_parent_id = collections.defaultdict(list) + self._project_entity = UNKNOWN_VALUE + + self._allow_data_changes = allow_data_changes + + self._path_reset_queue = None + + @property + def allow_data_changes(self): + """Entity hub allows changes of 'data' key on entities.""" + + return self._allow_data_changes + + @property + def project_name(self): + return self._project_name + + @property + def project_entity(self): + """Project entity.""" + + if self._project_entity is UNKNOWN_VALUE: + self.fill_project_from_server() + return self._project_entity + + def get_attributes_for_type(self, entity_type): + """Get attributes available for a type. + + Attributes are based on entity types. + + Todos: + Use attribute schema to validate values on entities. + + Args: + entity_type (Literal["project", "folder", "task"]): Entity type + for which should be attributes received. + + Returns: + Dict[str, Dict[str, Any]]: Attribute schemas that are available + for entered entity type. 
+ """ + + return self._connection.get_attributes_for_type(entity_type) + + def get_entity_by_id(self, entity_id): + """Receive entity by its id without entity type. + + The entity must be already existing in cached objects. + + Args: + entity_id (str): Id of entity. + + Returns: + Union[BaseEntity, None]: Entity object or None. + """ + + return self._entities_by_id.get(entity_id) + + def get_folder_by_id(self, entity_id, allow_query=True): + """Get folder entity by id. + + Args: + entity_id (str): Id of folder entity. + allow_query (bool): Try to query entity from server if is not + available in cache. + + Returns: + Union[FolderEntity, None]: Object of folder or 'None'. + """ + + if allow_query: + return self.get_or_query_entity_by_id(entity_id, ["folder"]) + return self._entities_by_id.get(entity_id) + + def get_task_by_id(self, entity_id, allow_query=True): + """Get task entity by id. + + Args: + entity_id (str): Id of task entity. + allow_query (bool): Try to query entity from server if is not + available in cache. + + Returns: + Union[TaskEntity, None]: Object of folder or 'None'. + """ + + if allow_query: + return self.get_or_query_entity_by_id(entity_id, ["task"]) + return self._entities_by_id.get(entity_id) + + def get_or_query_entity_by_id(self, entity_id, entity_types): + """Get or query entity based on it's id and possible entity types. + + This is a helper function when entity id is known but entity type may + have multiple possible options. + + Args: + entity_id (str): Entity id. + entity_types (Iterable[str]): Possible entity types that can the id + represent. e.g. 
'["folder", "project"]' + """ + + existing_entity = self._entities_by_id.get(entity_id) + if existing_entity is not None: + return existing_entity + + if not entity_types: + return None + + entity_data = None + for entity_type in entity_types: + if entity_type == "folder": + entity_data = self._connection.get_folder_by_id( + self.project_name, + entity_id, + fields=self._get_folder_fields(), + own_attributes=True + ) + elif entity_type == "task": + entity_data = self._connection.get_task_by_id( + self.project_name, + entity_id, + own_attributes=True + ) + else: + raise ValueError( + "Unknonwn entity type \"{}\"".format(entity_type) + ) + + if entity_data: + break + + if not entity_data: + return None + + if entity_type == "folder": + return self.add_folder(entity_data) + elif entity_type == "task": + return self.add_task(entity_data) + + return None + + @property + def entities(self): + for entity in self._entities_by_id.values(): + yield entity + + def add_new_folder(self, *args, created=True, **kwargs): + """Create folder object and add it to entity hub. + + Args: + parent (Union[ProjectEntity, FolderEntity]): Parent of added + folder. + + Returns: + FolderEntity: Added folder entity. + """ + + folder_entity = FolderEntity( + *args, **kwargs, created=created, entity_hub=self + ) + self.add_entity(folder_entity) + return folder_entity + + def add_new_task(self, *args, created=True, **kwargs): + task_entity = TaskEntity( + *args, **kwargs, created=created, entity_hub=self + ) + self.add_entity(task_entity) + return task_entity + + def add_folder(self, folder): + """Create folder object and add it to entity hub. + + Args: + folder (Dict[str, Any]): Folder entity data. + + Returns: + FolderEntity: Added folder entity. + """ + + folder_entity = FolderEntity.from_entity_data(folder, entity_hub=self) + self.add_entity(folder_entity) + return folder_entity + + def add_task(self, task): + """Create task object and add it to entity hub. 
+ + Args: + task (Dict[str, Any]): Task entity data. + + Returns: + TaskEntity: Added task entity. + """ + + task_entity = TaskEntity.from_entity_data(task, entity_hub=self) + self.add_entity(task_entity) + return task_entity + + def add_entity(self, entity): + """Add entity to hub cache. + + Args: + entity (BaseEntity): Entity that should be added to hub's cache. + """ + + self._entities_by_id[entity.id] = entity + parent_children = self._entities_by_parent_id[entity.parent_id] + if entity not in parent_children: + parent_children.append(entity) + + if entity.parent_id is PROJECT_PARENT_ID: + return + + parent = self._entities_by_id.get(entity.parent_id) + if parent is not None: + parent.add_child(entity.id) + + def folder_path_reseted(self, folder_id): + """Method called from 'FolderEntity' on path reset. + + This should reset cache of folder paths on all children entities. + + The path cache is always propagated from top to bottom so if an entity + has not cached path it means that any children can't have it cached. 
+ """ + + if self._path_reset_queue is not None: + self._path_reset_queue.append(folder_id) + return + + self._path_reset_queue = collections.deque() + self._path_reset_queue.append(folder_id) + while self._path_reset_queue: + children = self._entities_by_parent_id[folder_id] + for child in children: + # Get child path but don't trigger cache + path = child.get_path(False) + if path is not None: + # Reset it's path cache if is set + child.reset_path() + else: + self._path_reset_queue.append(child.id) + + self._path_reset_queue = None + + def unset_entity_parent(self, entity_id, parent_id): + entity = self._entities_by_id.get(entity_id) + parent = self._entities_by_id.get(parent_id) + children_ids = UNKNOWN_VALUE + if parent is not None: + children_ids = parent.get_children_ids(False) + + has_set_parent = False + if entity is not None: + has_set_parent = entity.parent_id == parent_id + + new_parent_id = None + if has_set_parent: + entity.parent_id = new_parent_id + + if children_ids is not UNKNOWN_VALUE and entity_id in children_ids: + parent.remove_child(entity_id) + + if entity is None or not has_set_parent: + self.reset_immutable_for_hierarchy_cache(parent_id) + return + + orig_parent_children = self._entities_by_parent_id[parent_id] + if entity in orig_parent_children: + orig_parent_children.remove(entity) + + new_parent_children = self._entities_by_parent_id[new_parent_id] + if entity not in new_parent_children: + new_parent_children.append(entity) + self.reset_immutable_for_hierarchy_cache(parent_id) + + def set_entity_parent(self, entity_id, parent_id, orig_parent_id=_NOT_SET): + parent = self._entities_by_id.get(parent_id) + entity = self._entities_by_id.get(entity_id) + if entity is None: + if parent is not None: + children_ids = parent.get_children_ids(False) + if ( + children_ids is not UNKNOWN_VALUE + and entity_id in children_ids + ): + parent.remove_child(entity_id) + self.reset_immutable_for_hierarchy_cache(parent.id) + return + + if orig_parent_id is 
_NOT_SET: + orig_parent_id = entity.parent_id + if orig_parent_id == parent_id: + return + + orig_parent_children = self._entities_by_parent_id[orig_parent_id] + if entity in orig_parent_children: + orig_parent_children.remove(entity) + self.reset_immutable_for_hierarchy_cache(orig_parent_id) + + orig_parent = self._entities_by_id.get(orig_parent_id) + if orig_parent is not None: + orig_parent.remove_child(entity_id) + + parent_children = self._entities_by_parent_id[parent_id] + if entity not in parent_children: + parent_children.append(entity) + + entity.parent_id = parent_id + if parent is None or parent.get_children_ids(False) is UNKNOWN_VALUE: + return + + parent.add_child(entity_id) + self.reset_immutable_for_hierarchy_cache(parent_id) + + def _query_entity_children(self, entity): + folder_fields = self._get_folder_fields() + tasks = [] + folders = [] + if entity.entity_type == "project": + folders = list(self._connection.get_folders( + entity["name"], + parent_ids=[entity.id], + fields=folder_fields, + own_attributes=True + )) + + elif entity.entity_type == "folder": + folders = list(self._connection.get_folders( + self.project_entity["name"], + parent_ids=[entity.id], + fields=folder_fields, + own_attributes=True + )) + + tasks = list(self._connection.get_tasks( + self.project_entity["name"], + folder_ids=[entity.id], + own_attributes=True + )) + + children_ids = { + child.id + for child in self._entities_by_parent_id[entity.id] + } + for folder in folders: + folder_entity = self._entities_by_id.get(folder["id"]) + if folder_entity is not None: + if folder_entity.parent_id == entity.id: + children_ids.add(folder_entity.id) + continue + + folder_entity = self.add_folder(folder) + children_ids.add(folder_entity.id) + + for task in tasks: + task_entity = self._entities_by_id.get(task["id"]) + if task_entity is not None: + if task_entity.parent_id == entity.id: + children_ids.add(task_entity.id) + continue + + task_entity = self.add_task(task) + 
children_ids.add(task_entity.id) + + entity.fill_children_ids(children_ids) + + def get_entity_children(self, entity, allow_query=True): + children_ids = entity.get_children_ids(allow_query=False) + if children_ids is not UNKNOWN_VALUE: + return entity.get_children() + + if children_ids is UNKNOWN_VALUE and not allow_query: + return UNKNOWN_VALUE + + self._query_entity_children(entity) + + return entity.get_children() + + def delete_entity(self, entity): + parent_id = entity.parent_id + if parent_id is None: + return + + parent = self._entities_by_parent_id.get(parent_id) + if parent is not None: + parent.remove_child(entity.id) + + def reset_immutable_for_hierarchy_cache( + self, entity_id, bottom_to_top=True + ): + if bottom_to_top is None or entity_id is None: + return + + reset_queue = collections.deque() + reset_queue.append(entity_id) + if bottom_to_top: + while reset_queue: + entity_id = reset_queue.popleft() + entity = self.get_entity_by_id(entity_id) + if entity is None: + continue + entity.reset_immutable_for_hierarchy_cache(None) + reset_queue.append(entity.parent_id) + else: + while reset_queue: + entity_id = reset_queue.popleft() + entity = self.get_entity_by_id(entity_id) + if entity is None: + continue + entity.reset_immutable_for_hierarchy_cache(None) + for child in self._entities_by_parent_id[entity.id]: + reset_queue.append(child.id) + + def fill_project_from_server(self): + """Query project from server and create it's entity. + + Returns: + ProjectEntity: Entity that was created based on queried data. + + Raises: + ValueError: When project was not found on server. 
+ """ + + project_name = self.project_name + project = self._connection.get_project( + project_name, + own_attributes=True + ) + if not project: + raise ValueError( + "Project \"{}\" was not found.".format(project_name) + ) + + self._project_entity = ProjectEntity( + project["code"], + parent_id=PROJECT_PARENT_ID, + entity_id=project["name"], + library=project["library"], + folder_types=project["folderTypes"], + task_types=project["taskTypes"], + name=project["name"], + attribs=project["ownAttrib"], + data=project["data"], + active=project["active"], + entity_hub=self + ) + self.add_entity(self._project_entity) + return self._project_entity + + def _get_folder_fields(self): + folder_fields = set( + self._connection.get_default_fields_for_type("folder") + ) + folder_fields.add("hasSubsets") + if self._allow_data_changes: + folder_fields.add("data") + return folder_fields + + def query_entities_from_server(self): + """Query whole project at once.""" + + project_entity = self.fill_project_from_server() + + folder_fields = self._get_folder_fields() + + folders = self._connection.get_folders( + project_entity.name, + fields=folder_fields, + own_attributes=True + ) + tasks = self._connection.get_tasks( + project_entity.name, + own_attributes=True + ) + folders_by_parent_id = collections.defaultdict(list) + for folder in folders: + parent_id = folder["parentId"] + folders_by_parent_id[parent_id].append(folder) + + tasks_by_parent_id = collections.defaultdict(list) + for task in tasks: + parent_id = task["folderId"] + tasks_by_parent_id[parent_id].append(task) + + hierarchy_queue = collections.deque() + hierarchy_queue.append((None, project_entity)) + while hierarchy_queue: + item = hierarchy_queue.popleft() + parent_id, parent_entity = item + + children_ids = set() + for folder in folders_by_parent_id[parent_id]: + folder_entity = self.add_folder(folder) + children_ids.add(folder_entity.id) + folder_entity.has_published_content = folder["hasSubsets"] + 
hierarchy_queue.append((folder_entity.id, folder_entity)) + + for task in tasks_by_parent_id[parent_id]: + task_entity = self.add_task(task) + children_ids.add(task_entity.id) + + parent_entity.fill_children_ids(children_ids) + self.lock() + + def lock(self): + if self._project_entity is None: + return + + for entity in self._entities_by_id.values(): + entity.lock() + + def _get_top_entities(self): + all_ids = set(self._entities_by_id.keys()) + return [ + entity + for entity in self._entities_by_id.values() + if entity.parent_id not in all_ids + ] + + def _split_entities(self): + top_entities = self._get_top_entities() + entities_queue = collections.deque(top_entities) + removed_entity_ids = [] + created_entity_ids = [] + other_entity_ids = [] + while entities_queue: + entity = entities_queue.popleft() + removed = entity.removed + if removed: + removed_entity_ids.append(entity.id) + elif entity.created: + created_entity_ids.append(entity.id) + else: + other_entity_ids.append(entity.id) + + for child in tuple(self._entities_by_parent_id[entity.id]): + if removed: + self.unset_entity_parent(child.id, entity.id) + entities_queue.append(child) + return created_entity_ids, other_entity_ids, removed_entity_ids + + def _get_update_body(self, entity, changes=None): + if changes is None: + changes = entity.changes + + if not changes: + return None + return { + "type": "update", + "entityType": entity.entity_type, + "entityId": entity.id, + "data": changes + } + + def _get_create_body(self, entity): + return { + "type": "create", + "entityType": entity.entity_type, + "entityId": entity.id, + "data": entity.to_create_body_data() + } + + def _get_delete_body(self, entity): + return { + "type": "delete", + "entityType": entity.entity_type, + "entityId": entity.id + } + + def commit_changes(self): + """Commit any changes that happened on entities. + + Todos: + Use Operations Session instead of known operations body. 
+ """ + + project_changes = self.project_entity.changes + if project_changes: + response = self._connection.patch( + "projects/{}".format(self.project_name), + **project_changes + ) + if response.status_code != 204: + raise ValueError("Failed to update project") + + self.project_entity.lock() + + operations_body = [] + + created_entity_ids, other_entity_ids, removed_entity_ids = ( + self._split_entities() + ) + processed_ids = set() + for entity_id in other_entity_ids: + if entity_id in processed_ids: + continue + + entity = self._entities_by_id[entity_id] + changes = entity.changes + processed_ids.add(entity_id) + if not changes: + continue + + bodies = [self._get_update_body(entity, changes)] + # Parent was created and was not yet added to operations body + parent_queue = collections.deque() + parent_queue.append(entity.parent_id) + while parent_queue: + # Make sure entity's parents are created + parent_id = parent_queue.popleft() + if ( + parent_id is UNKNOWN_VALUE + or parent_id in processed_ids + or parent_id not in created_entity_ids + ): + continue + + parent = self._entities_by_id.get(parent_id) + processed_ids.add(parent.id) + bodies.append(self._get_create_body(parent)) + parent_queue.append(parent.id) + + operations_body.extend(reversed(bodies)) + + for entity_id in created_entity_ids: + if entity_id in processed_ids: + continue + entity = self._entities_by_id[entity_id] + processed_ids.add(entity_id) + operations_body.append(self._get_create_body(entity)) + + for entity_id in reversed(removed_entity_ids): + if entity_id in processed_ids: + continue + + entity = self._entities_by_id.pop(entity_id) + parent_children = self._entities_by_parent_id[entity.parent_id] + if entity in parent_children: + parent_children.remove(entity) + + if not entity.created: + operations_body.append(self._get_delete_body(entity)) + + self._connection.send_batch_operations( + self.project_name, operations_body + ) + + self.lock() + + +class AttributeValue(object): + def 
__init__(self, value): + self._value = value + self._origin_value = copy.deepcopy(value) + + def get_value(self): + return self._value + + def set_value(self, value): + self._value = value + + value = property(get_value, set_value) + + @property + def changed(self): + return self._value != self._origin_value + + def lock(self): + self._origin_value = copy.deepcopy(self._value) + + +class Attributes(object): + """Object representing attribs of entity. + + Todos: + This could be enhanced to know attribute schema and validate values + based on the schema. + + Args: + attrib_keys (Iterable[str]): Keys that are available in attribs of the + entity. + values (Union[None, Dict[str, Any]]): Values of attributes. + """ + + def __init__(self, attrib_keys, values=UNKNOWN_VALUE): + if values in (UNKNOWN_VALUE, None): + values = {} + self._attributes = { + key: AttributeValue(values.get(key)) + for key in attrib_keys + } + + def __contains__(self, key): + return key in self._attributes + + def __getitem__(self, key): + return self._attributes[key].value + + def __setitem__(self, key, value): + self._attributes[key].set_value(value) + + def __iter__(self): + for key in self._attributes: + yield key + + def keys(self): + return self._attributes.keys() + + def values(self): + for attribute in self._attributes.values(): + yield attribute.value + + def items(self): + for key, attribute in self._attributes.items(): + yield key, attribute.value + + def get(self, key, default=None): + """Get value of attribute. + + Args: + key (str): Attribute name. + default (Any): Default value to return when attribute was not + found. + """ + + attribute = self._attributes.get(key) + if attribute is None: + return default + return attribute.value + + def set(self, key, value): + """Change value of attribute. + + Args: + key (str): Attribute name. + value (Any): New value of the attribute. + """ + + self[key] = value + + def get_attribute(self, key): + """Access to attribute object. 
+ + Args: + key (str): Name of attribute. + + Returns: + AttributeValue: Object of attribute value. + + Raises: + KeyError: When attribute is not available. + """ + + return self._attributes[key] + + def lock(self): + for attribute in self._attributes.values(): + attribute.lock() + + @property + def changes(self): + """Attribute value changes. + + Returns: + Dict[str, Any]: Key mapping with new values. + """ + + return { + attr_key: attribute.value + for attr_key, attribute in self._attributes.items() + if attribute.changed + } + + def to_dict(self, ignore_none=True): + output = {} + for key, value in self.items(): + if ( + value is UNKNOWN_VALUE + or (ignore_none and value is None) + ): + continue + + output[key] = value + return output + + +@six.add_metaclass(ABCMeta) +class BaseEntity(object): + """Object representation of entity from server which is capturing changes. + + All data on created object are expected as "current data" on server entity + unless the entity has set 'created' to 'True'. So if new data should be + stored to server entity then fill entity with server data first and + then change them. + + Calling 'lock' method will mark entity as "saved" and all changes made on + entity are set as "current data" on server. + + Args: + name (str): Name of entity. + attribs (Dict[str, Any]): Attribute values. + data (Dict[str, Any]): Entity data (custom data). + parent_id (Union[str, None]): Id of parent entity. + entity_id (Union[str, None]): Id of the entity. New id is created if + not passed. + thumbnail_id (Union[str, None]): Id of entity's thumbnail. + active (bool): Is entity active. + entity_hub (EntityHub): Object of entity hub which created object of + the entity. + created (Union[bool, None]): Entity is new. When 'None' is passed the + value is defined based on value of 'entity_id'. 
+ """ + + def __init__( + self, + entity_id=None, + parent_id=UNKNOWN_VALUE, + name=UNKNOWN_VALUE, + attribs=UNKNOWN_VALUE, + data=UNKNOWN_VALUE, + thumbnail_id=UNKNOWN_VALUE, + active=UNKNOWN_VALUE, + entity_hub=None, + created=None + ): + if entity_hub is None: + raise ValueError("Missing required kwarg 'entity_hub'") + + self._entity_hub = entity_hub + + if created is None: + created = entity_id is None + + entity_id = self._prepare_entity_id(entity_id) + + if data is None: + data = {} + + children_ids = UNKNOWN_VALUE + if created: + children_ids = set() + + if not created and parent_id is UNKNOWN_VALUE: + raise ValueError("Existing entity is missing parent id.") + + # These are public without any validation at this moment + # may change in future (e.g. name will have regex validation) + self._entity_id = entity_id + + self._parent_id = parent_id + self._name = name + self.active = active + self._created = created + self._thumbnail_id = thumbnail_id + self._attribs = Attributes( + self._get_attributes_for_type(self.entity_type), + attribs + ) + self._data = data + self._children_ids = children_ids + + self._orig_parent_id = parent_id + self._orig_name = name + self._orig_data = copy.deepcopy(data) + self._orig_thumbnail_id = thumbnail_id + self._orig_active = active + + self._immutable_for_hierarchy_cache = None + + def __repr__(self): + return "<{} - {}>".format(self.__class__.__name__, self.id) + + def __getitem__(self, item): + return getattr(self, item) + + def __setitem__(self, item, value): + return setattr(self, item, value) + + def _prepare_entity_id(self, entity_id): + entity_id = convert_entity_id(entity_id) + if entity_id is None: + entity_id = create_entity_id() + return entity_id + + @property + def id(self): + """Access to entity id under which is entity available on server. + + Returns: + str: Entity id. 
+ """ + + return self._entity_id + + @property + def removed(self): + return self._parent_id is None + + @property + def orig_parent_id(self): + return self._orig_parent_id + + @property + def attribs(self): + """Entity attributes based on server configuration. + + Returns: + Attributes: Attributes object handling changes and values of + attributes on entity. + """ + + return self._attribs + + @property + def data(self): + """Entity custom data that are not stored by any deterministic model. + + Be aware that 'data' can't be queried using GraphQl and cannot be + updated partially. + + Returns: + Dict[str, Any]: Custom data on entity. + """ + + return self._data + + @property + def project_name(self): + """Quick access to project from entity hub. + + Returns: + str: Name of project under which entity lives. + """ + + return self._entity_hub.project_name + + @abstractproperty + def entity_type(self): + """Entity type coresponding to server. + + Returns: + Literal[project, folder, task]: Entity type. + """ + + pass + + @abstractproperty + def parent_entity_types(self): + """Entity type coresponding to server. + + Returns: + Iterable[str]: Possible entity types of parent. + """ + + pass + + @abstractproperty + def changes(self): + """Receive entity changes. + + Returns: + Union[Dict[str, Any], None]: All values that have changed on + entity. New entity must return None. + """ + + pass + + @classmethod + @abstractmethod + def from_entity_data(cls, entity_data, entity_hub): + """Create entity based on queried data from server. + + Args: + entity_data (Dict[str, Any]): Entity data from server. + entity_hub (EntityHub): Hub which handle the entity. + + Returns: + BaseEntity: Object of the class. + """ + + pass + + @abstractmethod + def to_create_body_data(self): + """Convert object of entity to data for server on creation. + + Returns: + Dict[str, Any]: Entity data. 
+ """ + + pass + + @property + def immutable_for_hierarchy(self): + """Entity is immutable for hierarchy changes. + + Hierarchy changes can be considered as change of name or parents. + + Returns: + bool: Entity is immutable for hierarchy changes. + """ + + if self._immutable_for_hierarchy_cache is not None: + return self._immutable_for_hierarchy_cache + + immutable_for_hierarchy = self._immutable_for_hierarchy + if immutable_for_hierarchy is not None: + self._immutable_for_hierarchy_cache = immutable_for_hierarchy + return self._immutable_for_hierarchy_cache + + for child in self._entity_hub.get_entity_children(self): + if child.immutable_for_hierarchy: + self._immutable_for_hierarchy_cache = True + return self._immutable_for_hierarchy_cache + + self._immutable_for_hierarchy_cache = False + return self._immutable_for_hierarchy_cache + + @property + def _immutable_for_hierarchy(self): + """Override this method to define if entity object is immutable. + + This property was added to define immutable state of Folder entities + which is used in property 'immutable_for_hierarchy'. + + Returns: + Union[bool, None]: Bool to explicitly telling if is immutable or + not otherwise None. + """ + + return None + + @property + def has_cached_immutable_hierarchy(self): + return self._immutable_for_hierarchy_cache is not None + + def reset_immutable_for_hierarchy_cache(self, bottom_to_top=True): + """Clear cache of immutable hierarchy property. + + This is used when entity changed parent or a child was added. + + Args: + bottom_to_top (bool): Reset cache from top hierarchy to bottom or + from bottom hierarchy to top. + """ + + self._immutable_for_hierarchy_cache = None + self._entity_hub.reset_immutable_for_hierarchy_cache( + self.id, bottom_to_top + ) + + def _get_default_changes(self): + """Collect changes of common data on entity. + + Returns: + Dict[str, Any]: Changes on entity. Key and it's new value. 
+ """ + + changes = {} + if self._orig_name != self._name: + changes["name"] = self._name + + if self._entity_hub.allow_data_changes: + if self._orig_data != self._data: + changes["data"] = self._data + + if self._orig_thumbnail_id != self._thumbnail_id: + changes["thumbnailId"] = self._thumbnail_id + + if self._orig_active != self.active: + changes["active"] = self.active + + attrib_changes = self.attribs.changes + if attrib_changes: + changes["attrib"] = attrib_changes + return changes + + def _get_attributes_for_type(self, entity_type): + return self._entity_hub.get_attributes_for_type(entity_type) + + def lock(self): + """Lock entity as 'saved' so all changes are discarded.""" + + self._orig_parent_id = self._parent_id + self._orig_name = self._name + self._orig_data = copy.deepcopy(self._data) + self._orig_thumbnail_id = self.thumbnail_id + self._attribs.lock() + + self._immutable_for_hierarchy_cache = None + + def _get_entity_by_id(self, entity_id): + return self._entity_hub.get_entity_by_id(entity_id) + + def get_name(self): + return self._name + + def set_name(self, name): + self._name = name + + name = property(get_name, set_name) + + def get_parent_id(self): + """Parent entity id. + + Returns: + Union[str, None]: Id of parent entity or none if is not set. + """ + + return self._parent_id + + def set_parent_id(self, parent_id): + """Change parent by id. + + Args: + parent_id (Union[str, None]): Id of new parent for entity. + + Raises: + ValueError: If parent was not found by id. + TypeError: If validation of parent does not pass. + """ + + if parent_id != self._parent_id: + orig_parent_id = self._parent_id + self._parent_id = parent_id + self._entity_hub.set_entity_parent( + self.id, parent_id, orig_parent_id + ) + + parent_id = property(get_parent_id, set_parent_id) + + def get_parent(self, allow_query=True): + """Parent entity. + + Returns: + Union[BaseEntity, None]: Parent object. 
+ """ + + parent = self._entity_hub.get_entity_by_id(self._parent_id) + if parent is not None: + return parent + + if not allow_query: + return self._parent_id + + if self._parent_id is UNKNOWN_VALUE: + return self._parent_id + + return self._entity_hub.get_or_query_entity_by_id( + self._parent_id, self.parent_entity_types + ) + + def set_parent(self, parent): + """Change parent object. + + Args: + parent (BaseEntity): New parent for entity. + + Raises: + TypeError: If validation of parent does not pass. + """ + + parent_id = None + if parent is not None: + parent_id = parent.id + self._entity_hub.set_entity_parent(self.id, parent_id) + + parent = property(get_parent, set_parent) + + def get_children_ids(self, allow_query=True): + """Access to children objects. + + Todos: + Children should be maybe handled by EntityHub instead of entities + themselves. That would simplify 'set_entity_parent', + 'unset_entity_parent' and other logic related to changing + hierarchy. + + Returns: + Union[List[str], Type[UNKNOWN_VALUE]]: Children iterator. + """ + + if self._children_ids is UNKNOWN_VALUE: + if not allow_query: + return self._children_ids + self._entity_hub.get_entity_children(self, True) + return set(self._children_ids) + + children_ids = property(get_children_ids) + + def get_children(self, allow_query=True): + """Access to children objects. + + Returns: + Union[List[BaseEntity], Type[UNKNOWN_VALUE]]: Children iterator. + """ + + if self._children_ids is UNKNOWN_VALUE: + if not allow_query: + return self._children_ids + return self._entity_hub.get_entity_children(self, True) + + return [ + self._entity_hub.get_entity_by_id(children_id) + for children_id in self._children_ids + ] + + children = property(get_children) + + def add_child(self, child): + """Add child entity. + + Args: + child (BaseEntity): Child object to add. + + Raises: + TypeError: When child object has invalid type to be children. 
+ """ + + child_id = child + if isinstance(child_id, BaseEntity): + child_id = child.id + + if self._children_ids is not UNKNOWN_VALUE: + self._children_ids.add(child_id) + + self._entity_hub.set_entity_parent(child_id, self.id) + + def remove_child(self, child): + """Remove child entity. + + Is ignored if child is not in children. + + Args: + child (Union[str, BaseEntity]): Child object or child id to remove. + """ + + child_id = child + if isinstance(child_id, BaseEntity): + child_id = child.id + + if self._children_ids is not UNKNOWN_VALUE: + self._children_ids.discard(child_id) + self._entity_hub.unset_entity_parent(child_id, self.id) + + def get_thumbnail_id(self): + """Thumbnail id of entity. + + Returns: + Union[str, None]: Id of parent entity or none if is not set. + """ + + return self._thumbnail_id + + def set_thumbnail_id(self, thumbnail_id): + """Change thumbnail id. + + Args: + thumbnail_id (Union[str, None]): Id of thumbnail for entity. + """ + + self._thumbnail_id = thumbnail_id + + thumbnail_id = property(get_thumbnail_id, set_thumbnail_id) + + @property + def created(self): + """Entity is new. + + Returns: + bool: Entity is newly created. + """ + + return self._created + + def fill_children_ids(self, children_ids): + """Fill children ids on entity. + + Warning: + This is not an api call but is called from entity hub. + """ + + self._children_ids = set(children_ids) + + +class ProjectEntity(BaseEntity): + entity_type = "project" + parent_entity_types = [] + # TODO These are hardcoded but maybe should be used from server??? 
+ default_folder_type_icon = "folder" + default_task_type_icon = "task_alt" + + def __init__( + self, project_code, library, folder_types, task_types, *args, **kwargs + ): + super(ProjectEntity, self).__init__(*args, **kwargs) + + self._project_code = project_code + self._library_project = library + self._folder_types = folder_types + self._task_types = task_types + + self._orig_project_code = project_code + self._orig_library_project = library + self._orig_folder_types = copy.deepcopy(folder_types) + self._orig_task_types = copy.deepcopy(task_types) + + def _prepare_entity_id(self, entity_id): + if entity_id != self.project_name: + raise ValueError( + "Unexpected entity id value \"{}\". Expected \"{}\"".format( + entity_id, self.project_name)) + return entity_id + + def get_parent(self, *args, **kwargs): + return None + + def set_parent(self, parent): + raise ValueError( + "Parent of project cannot be set to {}".format(parent) + ) + + parent = property(get_parent, set_parent) + + def get_folder_types(self): + return copy.deepcopy(self._folder_types) + + def set_folder_types(self, folder_types): + new_folder_types = [] + for folder_type in folder_types: + if "icon" not in folder_type: + folder_type["icon"] = self.default_folder_type_icon + new_folder_types.append(folder_type) + self._folder_types = new_folder_types + + def get_task_types(self): + return copy.deepcopy(self._task_types) + + def set_task_types(self, task_types): + new_task_types = [] + for task_type in task_types: + if "icon" not in task_type: + task_type["icon"] = self.default_task_type_icon + new_task_types.append(task_type) + self._task_types = new_task_types + + folder_types = property(get_folder_types, set_folder_types) + task_types = property(get_task_types, set_task_types) + + def lock(self): + super(ProjectEntity, self).lock() + self._orig_folder_types = copy.deepcopy(self._folder_types) + self._orig_task_types = copy.deepcopy(self._task_types) + + @property + def changes(self): + changes = 
self._get_default_changes() + if self._orig_folder_types != self._folder_types: + changes["folderTypes"] = self.get_folder_types() + + if self._orig_task_types != self._task_types: + changes["taskTypes"] = self.get_task_types() + + return changes + + @classmethod + def from_entity_data(cls, project, entity_hub): + return cls( + project["code"], + parent_id=PROJECT_PARENT_ID, + entity_id=project["name"], + library=project["library"], + folder_types=project["folderTypes"], + task_types=project["taskTypes"], + name=project["name"], + attribs=project["ownAttrib"], + data=project["data"], + active=project["active"], + entity_hub=entity_hub + ) + + def to_create_body_data(self): + raise NotImplementedError( + "ProjectEntity does not support conversion to entity data" + ) + + +class FolderEntity(BaseEntity): + entity_type = "folder" + parent_entity_types = ["folder", "project"] + + def __init__(self, folder_type, *args, label=None, path=None, **kwargs): + super(FolderEntity, self).__init__(*args, **kwargs) + # Autofill project as parent of folder if is not yet set + # - this can be guessed only if folder was just created + if self.created and self._parent_id is UNKNOWN_VALUE: + self._parent_id = self.project_name + + self._folder_type = folder_type + self._label = label + + self._orig_folder_type = folder_type + self._orig_label = label + # Know if folder has any subsets + # - is used to know if folder allows hierarchy changes + self._has_published_content = False + self._path = path + + def get_folder_type(self): + return self._folder_type + + def set_folder_type(self, folder_type): + self._folder_type = folder_type + + folder_type = property(get_folder_type, set_folder_type) + + def get_label(self): + return self._label + + def set_label(self, label): + self._label = label + + label = property(get_label, set_label) + + def get_path(self, dynamic_value=True): + if not dynamic_value: + return self._path + + if self._path is None: + parent = self.parent + path = self.name 
+ if parent.entity_type == "folder": + parent_path = parent.path + path = "/".join([parent_path, path]) + self._path = path + return self._path + + def reset_path(self): + self._path = None + self._entity_hub.folder_path_reseted(self.id) + + path = property(get_path) + + def get_has_published_content(self): + return self._has_published_content + + def set_has_published_content(self, has_published_content): + if self._has_published_content is has_published_content: + return + + self._has_published_content = has_published_content + # Reset immutable cache of parents + self._entity_hub.reset_immutable_for_hierarchy_cache(self.id) + + has_published_content = property( + get_has_published_content, set_has_published_content + ) + + @property + def _immutable_for_hierarchy(self): + if self.has_published_content: + return True + return None + + def lock(self): + super(FolderEntity, self).lock() + self._orig_folder_type = self._folder_type + + @property + def changes(self): + changes = self._get_default_changes() + + if self._orig_parent_id != self._parent_id: + parent_id = self._parent_id + if parent_id == self.project_name: + parent_id = None + changes["parentId"] = parent_id + + if self._orig_folder_type != self._folder_type: + changes["folderType"] = self._folder_type + + label = self._label + if self._name == label: + label = None + + if label != self._orig_label: + changes["label"] = label + + return changes + + @classmethod + def from_entity_data(cls, folder, entity_hub): + parent_id = folder["parentId"] + if parent_id is None: + parent_id = entity_hub.project_entity.id + return cls( + folder["folderType"], + label=folder["label"], + path=folder["path"], + entity_id=folder["id"], + parent_id=parent_id, + name=folder["name"], + data=folder.get("data"), + attribs=folder["ownAttrib"], + active=folder["active"], + thumbnail_id=folder["thumbnailId"], + created=False, + entity_hub=entity_hub + ) + + def to_create_body_data(self): + parent_id = self._parent_id + if 
parent_id is UNKNOWN_VALUE: + raise ValueError("Folder does not have set 'parent_id'") + + if parent_id == self.project_name: + parent_id = None + + if not self.name or self.name is UNKNOWN_VALUE: + raise ValueError("Folder does not have set 'name'") + + output = { + "name": self.name, + "folderType": self.folder_type, + "parentId": parent_id, + } + attrib = self.attribs.to_dict() + if attrib: + output["attrib"] = attrib + + if self.active is not UNKNOWN_VALUE: + output["active"] = self.active + + if self.thumbnail_id is not UNKNOWN_VALUE: + output["thumbnailId"] = self.thumbnail_id + + if self._entity_hub.allow_data_changes: + output["data"] = self._data + return output + + +class TaskEntity(BaseEntity): + entity_type = "task" + parent_entity_types = ["folder"] + + def __init__(self, task_type, *args, label=None, **kwargs): + super(TaskEntity, self).__init__(*args, **kwargs) + + self._task_type = task_type + self._label = label + + self._orig_task_type = task_type + self._orig_label = label + + self._children_ids = set() + + def lock(self): + super(TaskEntity, self).lock() + self._orig_task_type = self._task_type + + def get_task_type(self): + return self._task_type + + def set_task_type(self, task_type): + self._task_type = task_type + + task_type = property(get_task_type, set_task_type) + + def get_label(self): + return self._label + + def set_label(self, label): + self._label = label + + label = property(get_label, set_label) + + def add_child(self, child): + raise ValueError("Task does not support to add children") + + @property + def changes(self): + changes = self._get_default_changes() + + if self._orig_parent_id != self._parent_id: + changes["folderId"] = self._parent_id + + if self._orig_task_type != self._task_type: + changes["taskType"] = self._task_type + + label = self._label + if self._name == label: + label = None + + if label != self._orig_label: + changes["label"] = label + + return changes + + @classmethod + def from_entity_data(cls, task, 
entity_hub): + return cls( + task["taskType"], + entity_id=task["id"], + label=task["label"], + parent_id=task["folderId"], + name=task["name"], + data=task.get("data"), + attribs=task["ownAttrib"], + active=task["active"], + created=False, + entity_hub=entity_hub + ) + + def to_create_body_data(self): + if self.parent_id is UNKNOWN_VALUE: + raise ValueError("Task does not have set 'parent_id'") + + output = { + "name": self.name, + "taskType": self.task_type, + "folderId": self.parent_id, + # "attrib" is added below only when it has content + } + attrib = self.attribs.to_dict() + if attrib: + output["attrib"] = attrib + + if self.active is not UNKNOWN_VALUE: + output["active"] = self.active + + if ( + self._entity_hub.allow_data_changes + and self._data is not UNKNOWN_VALUE + ): + output["data"] = self._data + return output diff --git a/openpype/vendor/python/ayon/ayon_api/events.py b/openpype/vendor/python/ayon/ayon_api/events.py new file mode 100644 index 0000000000..1ea9331244 --- /dev/null +++ b/openpype/vendor/python/ayon/ayon_api/events.py @@ -0,0 +1,52 @@ +import copy + + +class ServerEvent(object): + def __init__( + self, + topic, + sender=None, + event_hash=None, + project_name=None, + username=None, + dependencies=None, + description=None, + summary=None, + payload=None, + finished=True, + store=True, + ): + if dependencies is None: + dependencies = [] + if payload is None: + payload = {} + if summary is None: + summary = {} + + self.topic = topic + self.sender = sender + self.event_hash = event_hash + self.project_name = project_name + self.username = username + self.dependencies = dependencies + self.description = description + self.summary = summary + self.payload = payload + self.finished = finished + self.store = store + + def to_data(self): + return { + "topic": self.topic, + "sender": self.sender, + "hash": self.event_hash, + "project": self.project_name, + "user": self.username, + "dependencies": copy.deepcopy(self.dependencies), + "description": 
self.description, + # deep copy mutable containers so callers can't mutate the stored event + "summary": copy.deepcopy(self.summary), + "payload": copy.deepcopy(self.payload), + "finished": self.finished, + "store": self.store + } \ No newline at end of file diff --git a/openpype/vendor/python/ayon/ayon_api/exceptions.py b/openpype/vendor/python/ayon/ayon_api/exceptions.py new file mode 100644 index 0000000000..0ff09770b5 --- /dev/null +++ b/openpype/vendor/python/ayon/ayon_api/exceptions.py @@ -0,0 +1,97 @@ +import copy + + +class UrlError(Exception): + """Url cannot be parsed as url. + + Exception may contain hints of possible fixes of url that can be used in + UI if needed. + """ + + def __init__(self, message, title, hints=None): + if hints is None: + hints = [] + + self.title = title + self.hints = hints + super(UrlError, self).__init__(message) + + +class ServerError(Exception): + pass + + +class UnauthorizedError(ServerError): + pass + + +class AuthenticationError(ServerError): + pass + + +class ServerNotReached(ServerError): + pass + + +class GraphQlQueryFailed(Exception): + def __init__(self, errors, query, variables): + if variables is None: + variables = {} + + error_messages = [] + for error in errors: + msg = error["message"] + path = error.get("path") + if path: + msg += " on item '{}'".format("/".join(path)) + locations = error.get("locations") + if locations: + _locations = [ + "Line {} Column {}".format( + location["line"], location["column"] + ) + for location in locations + ] + + msg += " ({})".format(" and ".join(_locations)) + error_messages.append(msg) + + message = "GraphQl query Failed" + if error_messages: + message = "{}: {}".format(message, " | ".join(error_messages)) + + self.errors = errors + self.query = query + self.variables = copy.deepcopy(variables) + super(GraphQlQueryFailed, self).__init__(message) + + +class MissingEntityError(Exception): + pass + + +class ProjectNotFound(MissingEntityError): + def __init__(self, project_name, message=None): + if not message: + message = "Project 
\"{}\" was not found".format(project_name) + self.project_name = project_name + super(ProjectNotFound, self).__init__(message) + + +class FolderNotFound(MissingEntityError): + def __init__(self, project_name, folder_id, message=None): + self.project_name = project_name + self.folder_id = folder_id + if not message: + message = ( + "Folder with id \"{}\" was not found in project \"{}\"" + ).format(folder_id, project_name) + super(FolderNotFound, self).__init__(message) + + +class FailedOperations(Exception): + pass + + +class FailedServiceInit(Exception): + pass \ No newline at end of file diff --git a/openpype/vendor/python/ayon/ayon_api/graphql.py b/openpype/vendor/python/ayon/ayon_api/graphql.py new file mode 100644 index 0000000000..93349e9608 --- /dev/null +++ b/openpype/vendor/python/ayon/ayon_api/graphql.py @@ -0,0 +1,896 @@ +import copy +import numbers +from abc import ABCMeta, abstractproperty, abstractmethod + +import six + +from .exceptions import GraphQlQueryFailed + +FIELD_VALUE = object() + + +def fields_to_dict(fields): + if not fields: + return None + + output = {} + for field in fields: + hierarchy = field.split(".") + last = hierarchy.pop(-1) + value = output + for part in hierarchy: + if value is FIELD_VALUE: + break + + if part not in value: + value[part] = {} + value = value[part] + + if value is not FIELD_VALUE: + value[last] = FIELD_VALUE + return output + + +class QueryVariable(object): + """Object representing single varible used in GraphQlQuery. + + Variable definition is in GraphQl query header but it's value is used + in fields. + + Args: + variable_name (str): Name of variable in query. 
+ """ + + def __init__(self, variable_name): + self._variable_name = variable_name + self._name = "${}".format(variable_name) + + @property + def name(self): + """Name used in field filter.""" + + return self._name + + @property + def variable_name(self): + """Name of variable in query definition.""" + + return self._variable_name + + def __hash__(self): + return self._name.__hash__() + + def __str__(self): + return self._name + + def __format__(self, *args, **kwargs): + return self._name.__format__(*args, **kwargs) + + +class GraphQlQuery: + """GraphQl query which can have fields to query. + + Single use object which can be used only for one query. Object and children + objects keep track about paging and progress. + + Args: + name (str): Name of query. + """ + + offset = 2 + + def __init__(self, name): + self._name = name + self._variables = {} + self._children = [] + self._has_multiple_edge_fields = None + + @property + def indent(self): + """Indentation for preparation of query string. + + Returns: + int: Ident spaces. + """ + + return 0 + + @property + def child_indent(self): + """Indentation for preparation of query string used by children. + + Returns: + int: Ident spaces for children. + """ + + return self.indent + + @property + def need_query(self): + """Still need query from server. + + Needed for edges which use pagination. + + Returns: + bool: If still need query from server. + """ + + for child in self._children: + if child.need_query: + return True + return False + + @property + def has_multiple_edge_fields(self): + if self._has_multiple_edge_fields is None: + edge_counter = 0 + for child in self._children: + edge_counter += child.sum_edge_fields(2) + if edge_counter > 1: + break + self._has_multiple_edge_fields = edge_counter > 1 + + return self._has_multiple_edge_fields + + def add_variable(self, key, value_type, value=None): + """Add variable to query. + + Args: + key (str): Variable name. + value_type (str): Type of expected value in variables. 
This is + graphql type e.g. "[String!]", "Int", "Boolean", etc. + value (Any): Default value for variable. Can be changed later. + + Returns: + QueryVariable: Created variable object. + + Raises: + KeyError: If variable was already added before. + """ + + if key in self._variables: + raise KeyError( + "Variable \"{}\" was already set with type {}.".format( + key, value_type + ) + ) + + variable = QueryVariable(key) + self._variables[key] = { + "type": value_type, + "variable": variable, + "value": value + } + return variable + + def get_variable(self, key): + """Variable object. + + Args: + key (str): Variable name added to headers. + + Returns: + QueryVariable: Variable object used in query string. + """ + + return self._variables[key]["variable"] + + def get_variable_value(self, key, default=None): + """Get Current value of variable. + + Args: + key (str): Variable name. + default (Any): Default value if variable is available. + + Returns: + Any: Variable value. + """ + + variable_item = self._variables.get(key) + if variable_item: + return variable_item["value"] + return default + + def set_variable_value(self, key, value): + """Set value for variable. + + Args: + key (str): Variable name under which the value is stored. + value (Any): Variable value used in query. Variable is not used + if value is 'None'. + """ + + self._variables[key]["value"] = value + + def get_variables_values(self): + """Calculate variable values used that should be used in query. + + Variables with value set to 'None' are skipped. + + Returns: + Dict[str, Any]: Variable values by their name. + """ + + output = {} + for key, item in self._variables.items(): + value = item["value"] + if value is not None: + output[key] = item["value"] + + return output + + def add_obj_field(self, field): + """Add field object to children. + + Args: + field (BaseGraphQlQueryField): Add field to query children. 
+ """ + + if field in self._children: + return + + self._children.append(field) + field.set_parent(self) + + def add_field(self, name, has_edges=None): + """Add field to query. + + Args: + name (str): Field name e.g. 'id'. + has_edges (bool): Field has edges so it need paging. + + Returns: + BaseGraphQlQueryField: Created field object. + """ + + if has_edges: + item = GraphQlQueryEdgeField(name, self) + else: + item = GraphQlQueryField(name, self) + self.add_obj_field(item) + return item + + def calculate_query(self): + """Calculate query string which is sent to server. + + Returns: + str: GraphQl string with variables and headers. + + Raises: + ValueError: Query has no fiels. + """ + + if not self._children: + raise ValueError("Missing fields to query") + + variables = [] + for item in self._variables.values(): + if item["value"] is None: + continue + + variables.append( + "{}: {}".format(item["variable"], item["type"]) + ) + + variables_str = "" + if variables: + variables_str = "({})".format(",".join(variables)) + header = "query {}{}".format(self._name, variables_str) + + output = [] + output.append(header + " {") + for field in self._children: + output.append(field.calculate_query()) + output.append("}") + + return "\n".join(output) + + def parse_result(self, data, output, progress_data): + """Parse data from response for output. + + Output is stored to passed 'output' variable. That's because of paging + during which objects must have access to both new and previous values. + + Args: + data (Dict[str, Any]): Data received using calculated query. + output (Dict[str, Any]): Where parsed data are stored. + """ + + if not data: + return + + for child in self._children: + child.parse_result(data, output, progress_data) + + def query(self, con): + """Do a query from server. + + Args: + con (ServerAPI): Connection to server with 'query' method. + + Returns: + Dict[str, Any]: Parsed output from GraphQl query. 
+ """ + + progress_data = {} + output = {} + while self.need_query: + query_str = self.calculate_query() + variables = self.get_variables_values() + response = con.query_graphql( + query_str, + self.get_variables_values() + ) + if response.errors: + raise GraphQlQueryFailed(response.errors, query_str, variables) + self.parse_result(response.data["data"], output, progress_data) + + return output + + def continuous_query(self, con): + """Do a query from server. + + Args: + con (ServerAPI): Connection to server with 'query' method. + + Returns: + Dict[str, Any]: Parsed output from GraphQl query. + """ + + progress_data = {} + if self.has_multiple_edge_fields: + output = {} + while self.need_query: + query_str = self.calculate_query() + variables = self.get_variables_values() + response = con.query_graphql(query_str, variables) + if response.errors: + raise GraphQlQueryFailed( + response.errors, query_str, variables + ) + self.parse_result(response.data["data"], output, progress_data) + + yield output + + else: + while self.need_query: + output = {} + query_str = self.calculate_query() + variables = self.get_variables_values() + response = con.query_graphql(query_str, variables) + if response.errors: + raise GraphQlQueryFailed( + response.errors, query_str, variables + ) + + self.parse_result(response.data["data"], output, progress_data) + + yield output + + +@six.add_metaclass(ABCMeta) +class BaseGraphQlQueryField(object): + """Field in GraphQl query. + + Args: + name (str): Name of field. + parent (Union[BaseGraphQlQueryField, GraphQlQuery]): Parent object of a + field. + has_edges (bool): Field has edges and should handle paging. 
+ """ + + def __init__(self, name, parent): + if isinstance(parent, GraphQlQuery): + query_item = parent + else: + query_item = parent.query_item + + self._name = name + self._parent = parent + + self._filters = {} + + self._children = [] + # Value is changed on first parse of result + self._need_query = True + + self._query_item = query_item + + self._path = None + + def __repr__(self): + return "<{} {}>".format(self.__class__.__name__, self.path) + + @property + def need_query(self): + """Still need query from server. + + Needed for edges which use pagination. Look into children values too. + + Returns: + bool: If still need query from server. + """ + + if self._need_query: + return True + + for child in self._children: + if child.need_query: + return True + return False + + def sum_edge_fields(self, max_limit=None): + """Check how many edge fields query has. + + In case there are multiple edge fields or are nested the query can't + yield mid cursor results. + + Args: + max_limit (int): Skip rest of counting if counter is bigger then + entered number. + + Returns: + int: Counter edge fields + """ + + counter = 0 + if isinstance(self, GraphQlQueryEdgeField): + counter = 1 + + for child in self._children: + counter += child.sum_edge_fields(max_limit) + if max_limit is not None and counter >= max_limit: + break + return counter + + @property + def offset(self): + return self._query_item.offset + + @property + def indent(self): + return self._parent.child_indent + self.offset + + @abstractproperty + def child_indent(self): + pass + + @property + def query_item(self): + return self._query_item + + @abstractproperty + def has_edges(self): + pass + + @property + def child_has_edges(self): + for child in self._children: + if child.has_edges or child.child_has_edges: + return True + return False + + @property + def path(self): + """Field path for debugging purposes. + + Returns: + str: Field path in query. 
+ """ + + if self._path is None: + if isinstance(self._parent, GraphQlQuery): + path = self._name + else: + path = "/".join((self._parent.path, self._name)) + self._path = path + return self._path + + def reset_cursor(self): + for child in self._children: + child.reset_cursor() + + def get_variable_value(self, *args, **kwargs): + return self._query_item.get_variable_value(*args, **kwargs) + + def set_variable_value(self, *args, **kwargs): + return self._query_item.set_variable_value(*args, **kwargs) + + def set_filter(self, key, value): + self._filters[key] = value + + def has_filter(self, key): + return key in self._filters + + def remove_filter(self, key): + self._filters.pop(key, None) + + def set_parent(self, parent): + if self._parent is parent: + return + self._parent = parent + parent.add_obj_field(self) + + def add_obj_field(self, field): + if field in self._children: + return + + self._children.append(field) + field.set_parent(self) + + def add_field(self, name, has_edges=None): + if has_edges: + item = GraphQlQueryEdgeField(name, self) + else: + item = GraphQlQueryField(name, self) + self.add_obj_field(item) + return item + + def _filter_value_to_str(self, value): + if isinstance(value, QueryVariable): + if self.get_variable_value(value.variable_name) is None: + return None + return str(value) + + if isinstance(value, numbers.Number): + return str(value) + + if isinstance(value, six.string_types): + return '"{}"'.format(value) + + if isinstance(value, (list, set, tuple)): + return "[{}]".format( + ", ".join( + self._filter_value_to_str(item) + for item in iter(value) + ) + ) + raise TypeError( + "Unknown type to convert '{}'".format(str(type(value))) + ) + + def get_filters(self): + """Receive filters for item. + + By default just use copy of set filters. + + Returns: + Dict[str, Any]: Fields filters. 
+ """ + + return copy.deepcopy(self._filters) + + def _filters_to_string(self): + filters = self.get_filters() + if not filters: + return "" + + filter_items = [] + for key, value in filters.items(): + string_value = self._filter_value_to_str(value) + if string_value is None: + continue + + filter_items.append("{}: {}".format(key, string_value)) + + if not filter_items: + return "" + return "({})".format(", ".join(filter_items)) + + def _fake_children_parse(self): + """Mark children as they don't need query.""" + + for child in self._children: + child.parse_result({}, {}, {}) + + @abstractmethod + def calculate_query(self): + pass + + @abstractmethod + def parse_result(self, data, output, progress_data): + pass + + +class GraphQlQueryField(BaseGraphQlQueryField): + has_edges = False + + @property + def child_indent(self): + return self.indent + + def parse_result(self, data, output, progress_data): + if not isinstance(data, dict): + raise TypeError("{} Expected 'dict' type got '{}'".format( + self._name, str(type(data)) + )) + + self._need_query = False + value = data.get(self._name) + if value is None: + self._fake_children_parse() + if self._name in data: + output[self._name] = None + return + + if not self._children: + output[self._name] = value + return + + output_value = output.get(self._name) + if isinstance(value, dict): + if output_value is None: + output_value = {} + output[self._name] = output_value + + for child in self._children: + child.parse_result(value, output_value, progress_data) + return + + if output_value is None: + output_value = [] + output[self._name] = output_value + + if not value: + self._fake_children_parse() + return + + diff = len(value) - len(output_value) + if diff > 0: + for _ in range(diff): + output_value.append({}) + + for idx, item in enumerate(value): + item_value = output_value[idx] + for child in self._children: + child.parse_result(item, item_value, progress_data) + + def calculate_query(self): + offset = self.indent * " " + 
header = "{}{}{}".format( + offset, + self._name, + self._filters_to_string() + ) + if not self._children: + return header + + output = [] + output.append(header + " {") + + output.extend([ + field.calculate_query() + for field in self._children + ]) + output.append(offset + "}") + + return "\n".join(output) + + +class GraphQlQueryEdgeField(BaseGraphQlQueryField): + has_edges = True + + def __init__(self, *args, **kwargs): + super(GraphQlQueryEdgeField, self).__init__(*args, **kwargs) + self._cursor = None + + @property + def child_indent(self): + offset = self.offset * 2 + return self.indent + offset + + def reset_cursor(self): + # Reset cursor only for edges + self._cursor = None + self._need_query = True + + super(GraphQlQueryEdgeField, self).reset_cursor() + + def parse_result(self, data, output, progress_data): + if not isinstance(data, dict): + raise TypeError("{} Expected 'dict' type got '{}'".format( + self._name, str(type(data)) + )) + + value = data.get(self._name) + if value is None: + self._fake_children_parse() + self._need_query = False + return + + if self._name in output: + node_values = output[self._name] + else: + node_values = [] + output[self._name] = node_values + + handle_cursors = self.child_has_edges + if handle_cursors: + cursor_key = self._get_cursor_key() + if cursor_key in progress_data: + nodes_by_cursor = progress_data[cursor_key] + else: + nodes_by_cursor = {} + progress_data[cursor_key] = nodes_by_cursor + + page_info = value["pageInfo"] + new_cursor = page_info["endCursor"] + self._need_query = page_info["hasNextPage"] + edges = value["edges"] + # Fake result parse + if not edges: + self._fake_children_parse() + + for edge in edges: + if not handle_cursors: + edge_value = {} + node_values.append(edge_value) + else: + edge_cursor = edge["cursor"] + edge_value = nodes_by_cursor.get(edge_cursor) + if edge_value is None: + edge_value = {} + nodes_by_cursor[edge_cursor] = edge_value + node_values.append(edge_value) + + for child in 
self._children: + child.parse_result(edge["node"], edge_value, progress_data) + + if not self._need_query: + return + + change_cursor = True + for child in self._children: + if child.need_query: + change_cursor = False + + if change_cursor: + for child in self._children: + child.reset_cursor() + self._cursor = new_cursor + + def _get_cursor_key(self): + return "{}/__cursor__".format(self.path) + + def get_filters(self): + filters = super(GraphQlQueryEdgeField, self).get_filters() + + filters["first"] = 300 + if self._cursor: + filters["after"] = self._cursor + return filters + + def calculate_query(self): + if not self._children: + raise ValueError("Missing child definitions for edges {}".format( + self.path + )) + + offset = self.indent * " " + header = "{}{}{}".format( + offset, + self._name, + self._filters_to_string() + ) + + output = [] + output.append(header + " {") + + edges_offset = offset + self.offset * " " + node_offset = edges_offset + self.offset * " " + output.append(edges_offset + "edges {") + output.append(node_offset + "node {") + + for field in self._children: + output.append( + field.calculate_query() + ) + + output.append(node_offset + "}") + if self.child_has_edges: + output.append(node_offset + "cursor") + output.append(edges_offset + "}") + + # Add page information + output.append(edges_offset + "pageInfo {") + for page_key in ( + "endCursor", + "hasNextPage", + ): + output.append(node_offset + page_key) + output.append(edges_offset + "}") + output.append(offset + "}") + + return "\n".join(output) + + +INTROSPECTION_QUERY = """ + query IntrospectionQuery { + __schema { + queryType { name } + mutationType { name } + subscriptionType { name } + types { + ...FullType + } + directives { + name + description + locations + args { + ...InputValue + } + } + } + } + fragment FullType on __Type { + kind + name + description + fields(includeDeprecated: true) { + name + description + args { + ...InputValue + } + type { + ...TypeRef + } + isDeprecated + 
deprecationReason + } + inputFields { + ...InputValue + } + interfaces { + ...TypeRef + } + enumValues(includeDeprecated: true) { + name + description + isDeprecated + deprecationReason + } + possibleTypes { + ...TypeRef + } + } + fragment InputValue on __InputValue { + name + description + type { ...TypeRef } + defaultValue + } + fragment TypeRef on __Type { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + } + } + } + } + } + } + } + } +""" diff --git a/openpype/vendor/python/ayon/ayon_api/graphql_queries.py b/openpype/vendor/python/ayon/ayon_api/graphql_queries.py new file mode 100644 index 0000000000..b6d5c5fcb3 --- /dev/null +++ b/openpype/vendor/python/ayon/ayon_api/graphql_queries.py @@ -0,0 +1,362 @@ +import collections + +from .graphql import FIELD_VALUE, GraphQlQuery + + +def fields_to_dict(fields): + if not fields: + return None + + output = {} + for field in fields: + hierarchy = field.split(".") + last = hierarchy.pop(-1) + value = output + for part in hierarchy: + if value is FIELD_VALUE: + break + + if part not in value: + value[part] = {} + value = value[part] + + if value is not FIELD_VALUE: + value[last] = FIELD_VALUE + return output + + +def project_graphql_query(fields): + query = GraphQlQuery("ProjectQuery") + project_name_var = query.add_variable("projectName", "String!") + project_field = query.add_field("project") + project_field.set_filter("name", project_name_var) + + nested_fields = fields_to_dict(fields) + + query_queue = collections.deque() + for key, value in nested_fields.items(): + query_queue.append((key, value, project_field)) + + while query_queue: + item = query_queue.popleft() + key, value, parent = item + field = parent.add_field(key) + if value is FIELD_VALUE: + continue + + for k, v in value.items(): + query_queue.append((k, v, field)) + return query + + +def 
projects_graphql_query(fields): + query = GraphQlQuery("ProjectsQuery") + projects_field = query.add_field("projects", has_edges=True) + + nested_fields = fields_to_dict(fields) + + query_queue = collections.deque() + for key, value in nested_fields.items(): + query_queue.append((key, value, projects_field)) + + while query_queue: + item = query_queue.popleft() + key, value, parent = item + field = parent.add_field(key) + if value is FIELD_VALUE: + continue + + for k, v in value.items(): + query_queue.append((k, v, field)) + return query + + +def folders_graphql_query(fields): + query = GraphQlQuery("FoldersQuery") + project_name_var = query.add_variable("projectName", "String!") + folder_ids_var = query.add_variable("folderIds", "[String!]") + parent_folder_ids_var = query.add_variable("parentFolderIds", "[String!]") + folder_paths_var = query.add_variable("folderPaths", "[String!]") + folder_names_var = query.add_variable("folderNames", "[String!]") + has_subsets_var = query.add_variable("folderHasSubsets", "Boolean!") + + project_field = query.add_field("project") + project_field.set_filter("name", project_name_var) + + folders_field = project_field.add_field("folders", has_edges=True) + folders_field.set_filter("ids", folder_ids_var) + folders_field.set_filter("parentIds", parent_folder_ids_var) + folders_field.set_filter("names", folder_names_var) + folders_field.set_filter("paths", folder_paths_var) + folders_field.set_filter("hasSubsets", has_subsets_var) + + nested_fields = fields_to_dict(fields) + + query_queue = collections.deque() + for key, value in nested_fields.items(): + query_queue.append((key, value, folders_field)) + + while query_queue: + item = query_queue.popleft() + key, value, parent = item + field = parent.add_field(key) + if value is FIELD_VALUE: + continue + + for k, v in value.items(): + query_queue.append((k, v, field)) + return query + + +def tasks_graphql_query(fields): + query = GraphQlQuery("TasksQuery") + project_name_var = 
query.add_variable("projectName", "String!") + task_ids_var = query.add_variable("taskIds", "[String!]") + task_names_var = query.add_variable("taskNames", "[String!]") + task_types_var = query.add_variable("taskTypes", "[String!]") + folder_ids_var = query.add_variable("folderIds", "[String!]") + + project_field = query.add_field("project") + project_field.set_filter("name", project_name_var) + + tasks_field = project_field.add_field("tasks", has_edges=True) + tasks_field.set_filter("ids", task_ids_var) + # WARNING: At moment when this been created 'names' filter is not supported + tasks_field.set_filter("names", task_names_var) + tasks_field.set_filter("taskTypes", task_types_var) + tasks_field.set_filter("folderIds", folder_ids_var) + + nested_fields = fields_to_dict(fields) + + query_queue = collections.deque() + for key, value in nested_fields.items(): + query_queue.append((key, value, tasks_field)) + + while query_queue: + item = query_queue.popleft() + key, value, parent = item + field = parent.add_field(key) + if value is FIELD_VALUE: + continue + + for k, v in value.items(): + query_queue.append((k, v, field)) + return query + + +def subsets_graphql_query(fields): + query = GraphQlQuery("SubsetsQuery") + + project_name_var = query.add_variable("projectName", "String!") + folder_ids_var = query.add_variable("folderIds", "[String!]") + subset_ids_var = query.add_variable("subsetIds", "[String!]") + subset_names_var = query.add_variable("subsetNames", "[String!]") + + project_field = query.add_field("project") + project_field.set_filter("name", project_name_var) + + subsets_field = project_field.add_field("subsets", has_edges=True) + subsets_field.set_filter("ids", subset_ids_var) + subsets_field.set_filter("names", subset_names_var) + subsets_field.set_filter("folderIds", folder_ids_var) + + nested_fields = fields_to_dict(set(fields)) + + query_queue = collections.deque() + for key, value in nested_fields.items(): + query_queue.append((key, value, 
subsets_field)) + + while query_queue: + item = query_queue.popleft() + key, value, parent = item + field = parent.add_field(key) + if value is FIELD_VALUE: + continue + + for k, v in value.items(): + query_queue.append((k, v, field)) + return query + + +def versions_graphql_query(fields): + query = GraphQlQuery("VersionsQuery") + + project_name_var = query.add_variable("projectName", "String!") + subset_ids_var = query.add_variable("subsetIds", "[String!]") + version_ids_var = query.add_variable("versionIds", "[String!]") + versions_var = query.add_variable("versions", "[Int!]") + hero_only_var = query.add_variable("heroOnly", "Boolean") + latest_only_var = query.add_variable("latestOnly", "Boolean") + hero_or_latest_only_var = query.add_variable( + "heroOrLatestOnly", "Boolean" + ) + + project_field = query.add_field("project") + project_field.set_filter("name", project_name_var) + + subsets_field = project_field.add_field("versions", has_edges=True) + subsets_field.set_filter("ids", version_ids_var) + subsets_field.set_filter("subsetIds", subset_ids_var) + subsets_field.set_filter("versions", versions_var) + subsets_field.set_filter("heroOnly", hero_only_var) + subsets_field.set_filter("latestOnly", latest_only_var) + subsets_field.set_filter("heroOrLatestOnly", hero_or_latest_only_var) + + nested_fields = fields_to_dict(set(fields)) + + query_queue = collections.deque() + for key, value in nested_fields.items(): + query_queue.append((key, value, subsets_field)) + + while query_queue: + item = query_queue.popleft() + key, value, parent = item + field = parent.add_field(key) + if value is FIELD_VALUE: + continue + + for k, v in value.items(): + query_queue.append((k, v, field)) + return query + + +def representations_graphql_query(fields): + query = GraphQlQuery("RepresentationsQuery") + + project_name_var = query.add_variable("projectName", "String!") + repre_ids_var = query.add_variable("representationIds", "[String!]") + repre_names_var = 
query.add_variable("representationNames", "[String!]") + version_ids_var = query.add_variable("versionIds", "[String!]") + + project_field = query.add_field("project") + project_field.set_filter("name", project_name_var) + + repres_field = project_field.add_field("representations", has_edges=True) + repres_field.set_filter("ids", repre_ids_var) + repres_field.set_filter("versionIds", version_ids_var) + repres_field.set_filter("names", repre_names_var) + + nested_fields = fields_to_dict(set(fields)) + + query_queue = collections.deque() + for key, value in nested_fields.items(): + query_queue.append((key, value, repres_field)) + + while query_queue: + item = query_queue.popleft() + key, value, parent = item + field = parent.add_field(key) + if value is FIELD_VALUE: + continue + + for k, v in value.items(): + query_queue.append((k, v, field)) + return query + + +def representations_parents_qraphql_query( + version_fields, subset_fields, folder_fields +): + + query = GraphQlQuery("RepresentationsParentsQuery") + + project_name_var = query.add_variable("projectName", "String!") + repre_ids_var = query.add_variable("representationIds", "[String!]") + + project_field = query.add_field("project") + project_field.set_filter("name", project_name_var) + + repres_field = project_field.add_field("representations", has_edges=True) + repres_field.add_field("id") + repres_field.set_filter("ids", repre_ids_var) + version_field = repres_field.add_field("version") + + fields_queue = collections.deque() + for key, value in fields_to_dict(version_fields).items(): + fields_queue.append((key, value, version_field)) + + subset_field = version_field.add_field("subset") + for key, value in fields_to_dict(subset_fields).items(): + fields_queue.append((key, value, subset_field)) + + folder_field = subset_field.add_field("folder") + for key, value in fields_to_dict(folder_fields).items(): + fields_queue.append((key, value, folder_field)) + + while fields_queue: + item = fields_queue.popleft() 
+ key, value, parent = item + field = parent.add_field(key) + if value is FIELD_VALUE: + continue + + for k, v in value.items(): + fields_queue.append((k, v, field)) + + return query + + +def workfiles_info_graphql_query(fields): + query = GraphQlQuery("WorkfilesInfo") + project_name_var = query.add_variable("projectName", "String!") + workfiles_info_ids = query.add_variable("workfileIds", "[String!]") + task_ids_var = query.add_variable("taskIds", "[String!]") + paths_var = query.add_variable("paths", "[String!]") + + project_field = query.add_field("project") + project_field.set_filter("name", project_name_var) + + workfiles_field = project_field.add_field("workfiles", has_edges=True) + workfiles_field.set_filter("ids", workfiles_info_ids) + workfiles_field.set_filter("taskIds", task_ids_var) + workfiles_field.set_filter("paths", paths_var) + + nested_fields = fields_to_dict(set(fields)) + + query_queue = collections.deque() + for key, value in nested_fields.items(): + query_queue.append((key, value, workfiles_field)) + + while query_queue: + item = query_queue.popleft() + key, value, parent = item + field = parent.add_field(key) + if value is FIELD_VALUE: + continue + + for k, v in value.items(): + query_queue.append((k, v, field)) + return query + + +def events_graphql_query(fields): + query = GraphQlQuery("WorkfilesInfo") + topics_var = query.add_variable("eventTopics", "[String!]") + projects_var = query.add_variable("projectNames", "[String!]") + states_var = query.add_variable("eventStates", "[String!]") + users_var = query.add_variable("eventUsers", "[String!]") + include_logs_var = query.add_variable("includeLogsFilter", "Boolean!") + + events_field = query.add_field("events", has_edges=True) + events_field.set_filter("topics", topics_var) + events_field.set_filter("projects", projects_var) + events_field.set_filter("states", states_var) + events_field.set_filter("users", users_var) + events_field.set_filter("includeLogs", include_logs_var) + + 
nested_fields = fields_to_dict(set(fields)) + + query_queue = collections.deque() + for key, value in nested_fields.items(): + query_queue.append((key, value, events_field)) + + while query_queue: + item = query_queue.popleft() + key, value, parent = item + field = parent.add_field(key) + if value is FIELD_VALUE: + continue + + for k, v in value.items(): + query_queue.append((k, v, field)) + return query diff --git a/openpype/vendor/python/ayon/ayon_api/operations.py b/openpype/vendor/python/ayon/ayon_api/operations.py new file mode 100644 index 0000000000..21adc229d2 --- /dev/null +++ b/openpype/vendor/python/ayon/ayon_api/operations.py @@ -0,0 +1,688 @@ +import copy +import collections +import uuid +from abc import ABCMeta, abstractproperty + +import six + +from ._api import get_server_api_connection +from .utils import create_entity_id, REMOVED_VALUE + + +def _create_or_convert_to_id(entity_id=None): + if entity_id is None: + return create_entity_id() + + # Validate if can be converted to uuid + uuid.UUID(entity_id) + return entity_id + + +def new_folder_entity( + name, + folder_type, + parent_id=None, + attribs=None, + data=None, + thumbnail_id=None, + entity_id=None +): + """Create skeleton data of folder entity. + + Args: + name (str): Is considered as unique identifier of folder in project. + parent_id (str): Id of parent folder. + attribs (Dict[str, Any]): Explicitly set attributes of folder. + data (Dict[str, Any]): Custom folder data. Empty dictionary is used + if not passed. + thumbnail_id (str): Id of thumbnail related to folder. + entity_id (str): Predefined id of entity. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of folder entity. 
+ """ + + if attribs is None: + attribs = {} + + if data is None: + data = {} + + if parent_id is not None: + parent_id = _create_or_convert_to_id(parent_id) + + return { + "id": _create_or_convert_to_id(entity_id), + "name": name, + # This will be ignored + "folderType": folder_type, + "parentId": parent_id, + "data": data, + "attrib": attribs, + "thumbnailId": thumbnail_id + } + + +def new_subset_entity( + name, family, folder_id, attribs=None, data=None, entity_id=None +): + """Create skeleton data of subset entity. + + Args: + name (str): Is considered as unique identifier of subset under folder. + family (str): Subset's family. + folder_id (str): Id of parent folder. + attribs (Dict[str, Any]): Explicitly set attributes of subset. + data (Dict[str, Any]): Subset entity data. Empty dictionary is used + if not passed. Value of 'family' is used to fill 'family'. + entity_id (str): Predefined id of entity. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of subset entity. + """ + + if attribs is None: + attribs = {} + + if data is None: + data = {} + + return { + "id": _create_or_convert_to_id(entity_id), + "name": name, + "family": family, + "attrib": attribs, + "data": data, + "folderId": _create_or_convert_to_id(folder_id) + } + + +def new_version_entity( + version, + subset_id, + task_id=None, + thumbnail_id=None, + author=None, + attribs=None, + data=None, + entity_id=None +): + """Create skeleton data of version entity. + + Args: + version (int): Is considered as unique identifier of version + under subset. + subset_id (str): Id of parent subset. + task_id (str): Id of task under which subset was created. + thumbnail_id (str): Thumbnail related to version. + author (str): Name of version author. + attribs (Dict[str, Any]): Explicitly set attributes of version. + data (Dict[str, Any]): Version entity custom data. + entity_id (str): Predefined id of entity. New id is + created if not passed. 
+ + Returns: + Dict[str, Any]: Skeleton of version entity. + """ + + if attribs is None: + attribs = {} + + if data is None: + data = {} + + if data is None: + data = {} + + output = { + "id": _create_or_convert_to_id(entity_id), + "version": int(version), + "subsetId": _create_or_convert_to_id(subset_id), + "attrib": attribs, + "data": data + } + if task_id: + output["taskId"] = task_id + if thumbnail_id: + output["thumbnailId"] = thumbnail_id + if author: + output["author"] = author + return output + + +def new_hero_version_entity( + version, + subset_id, + task_id=None, + thumbnail_id=None, + author=None, + attribs=None, + data=None, + entity_id=None +): + """Create skeleton data of hero version entity. + + Args: + version (int): Is considered as unique identifier of version + under subset. Should be same as standard version if there is any. + subset_id (str): Id of parent subset. + task_id (str): Id of task under which subset was created. + thumbnail_id (str): Thumbnail related to version. + author (str): Name of version author. + attribs (Dict[str, Any]): Explicitly set attributes of version. + data (Dict[str, Any]): Version entity data. + entity_id (str): Predefined id of entity. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of version entity. + """ + + if attribs is None: + attribs = {} + + if data is None: + data = {} + + output = { + "id": _create_or_convert_to_id(entity_id), + "version": -abs(int(version)), + "subsetId": subset_id, + "attrib": attribs, + "data": data + } + if task_id: + output["taskId"] = task_id + if thumbnail_id: + output["thumbnailId"] = thumbnail_id + if author: + output["author"] = author + return output + + +def new_representation_entity( + name, version_id, attribs=None, data=None, entity_id=None +): + """Create skeleton data of representation entity. + + Args: + name (str): Representation name considered as unique identifier + of representation under version. + version_id (str): Id of parent version. 
+ attribs (Dict[str, Any]): Explicitly set attributes of representation. + data (Dict[str, Any]): Representation entity data. + entity_id (str): Predefined id of entity. New id is created + if not passed. + + Returns: + Dict[str, Any]: Skeleton of representation entity. + """ + + if attribs is None: + attribs = {} + + if data is None: + data = {} + + return { + "id": _create_or_convert_to_id(entity_id), + "versionId": _create_or_convert_to_id(version_id), + "name": name, + "data": data, + "attrib": attribs + } + + +def new_workfile_info_doc( + filename, folder_id, task_name, files, data=None, entity_id=None +): + """Create skeleton data of workfile info entity. + + Workfile entity is at this moment used primarily for artist notes. + + Args: + filename (str): Filename of workfile. + folder_id (str): Id of folder under which workfile live. + task_name (str): Task under which was workfile created. + files (List[str]): List of rootless filepaths related to workfile. + data (Dict[str, Any]): Additional metadata. + entity_id (str): Predefined id of entity. New id is created + if not passed. + + Returns: + Dict[str, Any]: Skeleton of workfile info entity. + """ + + if not data: + data = {} + + return { + "id": _create_or_convert_to_id(entity_id), + "parent": _create_or_convert_to_id(folder_id), + "task_name": task_name, + "filename": filename, + "data": data, + "files": files + } + + +@six.add_metaclass(ABCMeta) +class AbstractOperation(object): + """Base operation class. + + Opration represent a call into database. The call can create, change or + remove data. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'folder', 'representation' etc. 
+ """ + + def __init__(self, project_name, entity_type, session): + self._project_name = project_name + self._entity_type = entity_type + self._session = session + self._id = str(uuid.uuid4()) + + @property + def project_name(self): + return self._project_name + + @property + def id(self): + """Identifier of operation.""" + + return self._id + + @property + def entity_type(self): + return self._entity_type + + @abstractproperty + def operation_name(self): + """Stringified type of operation.""" + + pass + + def to_data(self): + """Convert opration to data that can be converted to json or others. + + Returns: + Dict[str, Any]: Description of operation. + """ + + return { + "id": self._id, + "entity_type": self.entity_type, + "project_name": self.project_name, + "operation": self.operation_name + } + + +class CreateOperation(AbstractOperation): + """Opeartion to create an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'folder', 'representation' etc. + data (Dict[str, Any]): Data of entity that will be created. 
+ """ + + operation_name = "create" + + def __init__(self, project_name, entity_type, data, session): + if not data: + data = {} + else: + data = copy.deepcopy(dict(data)) + + if "id" not in data: + data["id"] = create_entity_id() + + self._data = data + super(CreateOperation, self).__init__( + project_name, entity_type, session + ) + + def __setitem__(self, key, value): + self.set_value(key, value) + + def __getitem__(self, key): + return self.data[key] + + def set_value(self, key, value): + self.data[key] = value + + def get(self, key, *args, **kwargs): + return self.data.get(key, *args, **kwargs) + + @property + def con(self): + return self.session.con + + @property + def session(self): + return self._session + + @property + def entity_id(self): + return self._data["id"] + + @property + def data(self): + return self._data + + def to_data(self): + output = super(CreateOperation, self).to_data() + output["data"] = copy.deepcopy(self.data) + return output + + def to_server_operation(self): + return { + "id": self.id, + "type": "create", + "entityType": self.entity_type, + "entityId": self.entity_id, + "data": self._data + } + + +class UpdateOperation(AbstractOperation): + """Operation to update an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'folder', 'representation' etc. + entity_id (str): Identifier of an entity. + update_data (Dict[str, Any]): Key -> value changes that will be set in + database. If value is set to 'REMOVED_VALUE' the key will be + removed. Only first level of dictionary is checked (on purpose). 
+ """ + + operation_name = "update" + + def __init__( + self, project_name, entity_type, entity_id, update_data, session + ): + super(UpdateOperation, self).__init__( + project_name, entity_type, session + ) + + self._entity_id = entity_id + self._update_data = update_data + + @property + def entity_id(self): + return self._entity_id + + @property + def update_data(self): + return self._update_data + + @property + def con(self): + return self.session.con + + @property + def session(self): + return self._session + + def to_data(self): + changes = {} + for key, value in self._update_data.items(): + if value is REMOVED_VALUE: + value = None + changes[key] = value + + output = super(UpdateOperation, self).to_data() + output.update({ + "entity_id": self.entity_id, + "changes": changes + }) + return output + + def to_server_operation(self): + if not self._update_data: + return None + + update_data = {} + for key, value in self._update_data.items(): + if value is REMOVED_VALUE: + value = None + update_data[key] = value + + return { + "id": self.id, + "type": "update", + "entityType": self.entity_type, + "entityId": self.entity_id, + "data": update_data + } + + +class DeleteOperation(AbstractOperation): + """Opeartion to delete an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'folder', 'representation' etc. + entity_id (str): Entity id that will be removed. 
+ """ + + operation_name = "delete" + + def __init__(self, project_name, entity_type, entity_id, session): + self._entity_id = entity_id + + super(DeleteOperation, self).__init__( + project_name, entity_type, session + ) + + @property + def entity_id(self): + return self._entity_id + + @property + def con(self): + return self.session.con + + @property + def session(self): + return self._session + + def to_data(self): + output = super(DeleteOperation, self).to_data() + output["entity_id"] = self.entity_id + return output + + def to_server_operation(self): + return { + "id": self.id, + "type": self.operation_name, + "entityId": self.entity_id, + "entityType": self.entity_type, + } + + +class OperationsSession(object): + """Session storing operations that should happen in an order. + + At this moment does not handle anything special can be sonsidered as + stupid list of operations that will happen after each other. If creation + of same entity is there multiple times it's handled in any way and entity + values are not validated. + + All operations must be related to single project. + + Args: + project_name (str): Project name to which are operations related. + """ + + def __init__(self, con=None): + if con is None: + con = get_server_api_connection() + self._con = con + self._project_cache = {} + self._operations = [] + self._nested_operations = collections.defaultdict(list) + + @property + def con(self): + return self._con + + def get_project(self, project_name): + if project_name not in self._project_cache: + self._project_cache[project_name] = self.con.get_project( + project_name) + return copy.deepcopy(self._project_cache[project_name]) + + def __len__(self): + return len(self._operations) + + def add(self, operation): + """Add operation to be processed. + + Args: + operation (BaseOperation): Operation that should be processed. 
+ """ + if not isinstance( + operation, + (CreateOperation, UpdateOperation, DeleteOperation) + ): + raise TypeError("Expected Operation object got {}".format( + str(type(operation)) + )) + + self._operations.append(operation) + + def append(self, operation): + """Add operation to be processed. + + Args: + operation (BaseOperation): Operation that should be processed. + """ + + self.add(operation) + + def extend(self, operations): + """Add operations to be processed. + + Args: + operations (List[BaseOperation]): Operations that should be + processed. + """ + + for operation in operations: + self.add(operation) + + def remove(self, operation): + """Remove operation.""" + + self._operations.remove(operation) + + def clear(self): + """Clear all registered operations.""" + + self._operations = [] + + def to_data(self): + return [ + operation.to_data() + for operation in self._operations + ] + + def commit(self): + """Commit session operations.""" + + operations, self._operations = self._operations, [] + if not operations: + return + + operations_by_project = collections.defaultdict(list) + for operation in operations: + operations_by_project[operation.project_name].append(operation) + + for project_name, operations in operations_by_project.items(): + operations_body = [] + for operation in operations: + body = operation.to_server_operation() + if body is not None: + operations_body.append(body) + + self._con.send_batch_operations( + project_name, operations_body, can_fail=False + ) + + def create_entity(self, project_name, entity_type, data, nested_id=None): + """Fast access to 'CreateOperation'. + + Args: + project_name (str): On which project the creation happens. + entity_type (str): Which entity type will be created. + data (Dicst[str, Any]): Entity data. + nested_id (str): Id of other operation from which is triggered + operation -> Operations can trigger suboperations but they + must be added to operations list after it's parent is added. 
+ + Returns: + CreateOperation: Object of update operation. + """ + + operation = CreateOperation( + project_name, entity_type, data, self + ) + + if nested_id: + self._nested_operations[nested_id].append(operation) + else: + self.add(operation) + if operation.id in self._nested_operations: + self.extend(self._nested_operations.pop(operation.id)) + + return operation + + def update_entity( + self, project_name, entity_type, entity_id, update_data, nested_id=None + ): + """Fast access to 'UpdateOperation'. + + Returns: + UpdateOperation: Object of update operation. + """ + + operation = UpdateOperation( + project_name, entity_type, entity_id, update_data, self + ) + if nested_id: + self._nested_operations[nested_id].append(operation) + else: + self.add(operation) + if operation.id in self._nested_operations: + self.extend(self._nested_operations.pop(operation.id)) + return operation + + def delete_entity( + self, project_name, entity_type, entity_id, nested_id=None + ): + """Fast access to 'DeleteOperation'. + + Returns: + DeleteOperation: Object of delete operation. 
+ """ + + operation = DeleteOperation( + project_name, entity_type, entity_id, self + ) + if nested_id: + self._nested_operations[nested_id].append(operation) + else: + self.add(operation) + if operation.id in self._nested_operations: + self.extend(self._nested_operations.pop(operation.id)) + return operation diff --git a/openpype/vendor/python/ayon/ayon_api/server_api.py b/openpype/vendor/python/ayon/ayon_api/server_api.py new file mode 100644 index 0000000000..e3a42e4dad --- /dev/null +++ b/openpype/vendor/python/ayon/ayon_api/server_api.py @@ -0,0 +1,4247 @@ +import os +import re +import io +import json +import logging +import collections +import platform +import copy +import uuid +from contextlib import contextmanager +try: + from http import HTTPStatus +except ImportError: + HTTPStatus = None + +import requests + +from .constants import ( + DEFAULT_PROJECT_FIELDS, + DEFAULT_FOLDER_FIELDS, + DEFAULT_TASK_FIELDS, + DEFAULT_SUBSET_FIELDS, + DEFAULT_VERSION_FIELDS, + DEFAULT_REPRESENTATION_FIELDS, + REPRESENTATION_FILES_FIELDS, + DEFAULT_WORKFILE_INFO_FIELDS, + DEFAULT_EVENT_FIELDS, +) +from .thumbnails import ThumbnailCache +from .graphql import GraphQlQuery, INTROSPECTION_QUERY +from .graphql_queries import ( + project_graphql_query, + projects_graphql_query, + folders_graphql_query, + tasks_graphql_query, + subsets_graphql_query, + versions_graphql_query, + representations_graphql_query, + representations_parents_qraphql_query, + workfiles_info_graphql_query, + events_graphql_query, +) +from .exceptions import ( + FailedOperations, + UnauthorizedError, + AuthenticationError, + ServerNotReached, + ServerError, +) +from .utils import ( + RepresentationParents, + prepare_query_string, + logout_from_server, + create_entity_id, + entity_data_json_default, + failed_json_default, + TransferProgress, +) + +PatternType = type(re.compile("")) +JSONDecodeError = getattr(json, "JSONDecodeError", ValueError) +# This should be collected from server schema 
+PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_" +PROJECT_NAME_REGEX = re.compile( + "^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS) +) + + +def _get_description(response): + if HTTPStatus is None: + return str(response.orig_response) + return HTTPStatus(response.status).description + + +class RequestType: + def __init__(self, name): + self.name = name + + def __hash__(self): + return self.name.__hash__() + + +class RequestTypes: + get = RequestType("GET") + post = RequestType("POST") + put = RequestType("PUT") + patch = RequestType("PATCH") + delete = RequestType("DELETE") + + +class RestApiResponse(object): + """API Response.""" + + def __init__(self, response, data=None): + if response is None: + status_code = 500 + else: + status_code = response.status_code + self._response = response + self.status = status_code + self._data = data + + @property + def orig_response(self): + return self._response + + @property + def headers(self): + return self._response.headers + + @property + def data(self): + if self._data is None: + if self.status != 204: + self._data = self.orig_response.json() + else: + self._data = {} + return self._data + + @property + def content(self): + return self._response.content + + @property + def content_type(self): + return self.headers.get("Content-Type") + + @property + def detail(self): + return self.get("detail", _get_description(self)) + + @property + def status_code(self): + return self.status + + def raise_for_status(self): + self._response.raise_for_status() + + def __enter__(self, *args, **kwargs): + return self._response.__enter__(*args, **kwargs) + + def __contains__(self, key): + return key in self.data + + def __repr__(self): + return "<{}: {} ({})>".format( + self.__class__.__name__, self.status, self.detail + ) + + def __len__(self): + return 200 <= self.status < 400 + + def __getitem__(self, key): + return self.data[key] + + def get(self, key, default=None): + data = self.data + if isinstance(data, dict): + return self.data.get(key, 
default) + return default + + +class GraphQlResponse: + def __init__(self, data): + self.data = data + self.errors = data.get("errors") + + def __len__(self): + if self.errors: + return 0 + return 1 + + def __repr__(self): + if self.errors: + return "<{} errors={}>".format( + self.__class__.__name__, self.errors[0]['message'] + ) + return "<{}>".format(self.__class__.__name__) + + +def fill_own_attribs(entity): + if not entity or not entity.get("attrib"): + return + + attributes = set(entity["ownAttrib"]) + + own_attrib = {} + entity["ownAttrib"] = own_attrib + + for key, value in entity["attrib"].items(): + if key not in attributes: + own_attrib[key] = None + else: + own_attrib[key] = copy.deepcopy(value) + + +class _AsUserStack: + """Handle stack of users used over server api connection in service mode. + + ServerAPI can behave as other users if it is using special API key. + + Examples: + >>> stack = _AsUserStack() + >>> stack.set_default_username("DefaultName") + >>> print(stack.username) + DefaultName + >>> with stack.as_user("Other1"): + ... print(stack.username) + ... with stack.as_user("Other2"): + ... print(stack.username) + ... print(stack.username) + ... stack.clear() + ... 
print(stack.username) + Other1 + Other2 + Other1 + None + >>> print(stack.username) + None + >>> stack.set_default_username("DefaultName") + >>> print(stack.username) + DefaultName + """ + + def __init__(self): + self._users_by_id = {} + self._user_ids = [] + self._last_user = None + self._default_user = None + + def clear(self): + self._users_by_id = {} + self._user_ids = [] + self._last_user = None + self._default_user = None + + @property + def username(self): + # Use '_user_ids' for boolean check to have ability "unset" + # default user + if self._user_ids: + return self._last_user + return self._default_user + + def get_default_username(self): + return self._default_user + + def set_default_username(self, username=None): + self._default_user = username + + default_username = property(get_default_username, set_default_username) + + @contextmanager + def as_user(self, username): + self._last_user = username + user_id = uuid.uuid4().hex + self._user_ids.append(user_id) + self._users_by_id[user_id] = username + try: + yield + finally: + self._users_by_id.pop(user_id, None) + if not self._user_ids: + return + + # First check if is the user id the last one + was_last = self._user_ids[-1] == user_id + # Remove id from variables + if user_id in self._user_ids: + self._user_ids.remove(user_id) + + if not was_last: + return + + new_last_user = None + if self._user_ids: + new_last_user = self._users_by_id.get(self._user_ids[-1]) + self._last_user = new_last_user + + +class ServerAPI(object): + """Base handler of connection to server. + + Requires url to server which is used as base for api and graphql calls. + + Login cause that a session is used + + Args: + base_url (str): Example: http://localhost:5000 + token (str): Access token (api key) to server. + site_id (str): Unique name of site. Should be the same when + connection is created from the same machine under same user. + client_version (str): Version of client application (used in + desktop client application). 
+ default_settings_variant (Union[str, None]): Settings variant used by + default if a method for settings won't get any (by default is + 'production'). + """ + + def __init__( + self, + base_url, + token=None, + site_id=None, + client_version=None, + default_settings_variant=None + ): + if not base_url: + raise ValueError("Invalid server URL {}".format(str(base_url))) + + base_url = base_url.rstrip("/") + self._base_url = base_url + self._rest_url = "{}/api".format(base_url) + self._graphl_url = "{}/graphql".format(base_url) + self._log = None + self._access_token = token + self._site_id = site_id + self._client_version = client_version + self._default_settings_variant = default_settings_variant + self._access_token_is_service = None + self._token_is_valid = None + self._server_available = None + + self._session = None + + self._base_functions_mapping = { + RequestTypes.get: requests.get, + RequestTypes.post: requests.post, + RequestTypes.put: requests.put, + RequestTypes.patch: requests.patch, + RequestTypes.delete: requests.delete + } + self._session_functions_mapping = {} + + # Attributes cache + self._attributes_schema = None + self._entity_type_attributes_cache = {} + + self._as_user_stack = _AsUserStack() + self._thumbnail_cache = ThumbnailCache(True) + + @property + def log(self): + if self._log is None: + self._log = logging.getLogger(self.__class__.__name__) + return self._log + + def get_base_url(self): + return self._base_url + + def get_rest_url(self): + return self._rest_url + + base_url = property(get_base_url) + rest_url = property(get_rest_url) + + @property + def access_token(self): + """Access token used for authorization to server. + + Returns: + Union[str, None]: Token string or None if not authorized yet. + """ + + return self._access_token + + def get_site_id(self): + """Site id used for connection. + + Site id tells server from which machine/site is connection created and + is used for default site overrides when settings are received. 
+ + Returns: + Union[str, None]: Site id value or None if not filled. + """ + + return self._site_id + + def set_site_id(self, site_id): + """Change site id of connection. + + Behave as specific site for server. It affects default behavior of + settings getter methods. + + Args: + site_id (Union[str, None]): Site id value, or 'None' to unset. + """ + + if self._site_id == site_id: + return + self._site_id = site_id + # Recreate session on machine id change + self._update_session_headers() + + site_id = property(get_site_id, set_site_id) + + def get_client_version(self): + """Version of client used to connect to server. + + Client version is AYON client build desktop application. + + Returns: + str: Client version string used in connection. + """ + + return self._client_version + + def set_client_version(self, client_version): + """Set version of client used to connect to server. + + Client version is AYON client build desktop application. + + Args: + client_version (Union[str, None]): Client version string. + """ + + if self._client_version == client_version: + return + + self._client_version = client_version + self._update_session_headers() + + client_version = property(get_client_version, set_client_version) + + def get_default_settings_variant(self): + """Default variant used for settings. + + Returns: + Union[str, None]: name of variant or None. + """ + + return self._default_settings_variant + + def set_default_settings_variant(self, variant): + """Change default variant for addon settings. + + Note: + It is recommended to set only 'production' or 'staging' variants + as default variant. + + Args: + variant (Union[str, None]): Settings variant name. + """ + + self._default_settings_variant = variant + + default_settings_variant = property( + get_default_settings_variant, + set_default_settings_variant + ) + + def get_default_service_username(self): + """Default username used for callbacks when used with service API key. 
+ + Returns: + Union[str, None]: Username if any was filled. + """ + + return self._as_user_stack.get_default_username() + + def set_default_service_username(self, username=None): + """Service API will work as other user. + + Service API keys can work as other user. It can be temporary using + context manager 'as_user' or it is possible to set default username if + 'as_user' context manager is not entered. + + Args: + username (Union[str, None]): Username to work as when service. + + Raises: + ValueError: When connection is not yet authenticated or api key + is not service token. + """ + + current_username = self._as_user_stack.get_default_username() + if current_username == username: + return + + if not self.has_valid_token: + raise ValueError( + "Authentication of connection did not happen yet." + ) + + if not self._access_token_is_service: + raise ValueError( + "Can't set service username. API key is not a service token." + ) + + self._as_user_stack.set_default_username(username) + if self._as_user_stack.username == username: + self._update_session_headers() + + @contextmanager + def as_username(self, username): + """Service API will temporarily work as other user. + + This method can be used only if service API key is logged in. + + Args: + username (Union[str, None]): Username to work as when service. + + Raises: + ValueError: When connection is not yet authenticated or api key + is not service token. + """ + + if not self.has_valid_token: + raise ValueError( + "Authentication of connection did not happen yet." + ) + + if not self._access_token_is_service: + raise ValueError( + "Can't set service username. API key is not a service token." 
+ ) + + with self._as_user_stack.as_user(username) as o: + self._update_session_headers() + try: + yield o + finally: + self._update_session_headers() + + @property + def is_server_available(self): + if self._server_available is None: + response = requests.get(self._base_url) + self._server_available = response.status_code == 200 + return self._server_available + + @property + def has_valid_token(self): + if self._access_token is None: + return False + + if self._token_is_valid is None: + self.validate_token() + return self._token_is_valid + + def validate_server_availability(self): + if not self.is_server_available: + raise ServerNotReached("Server \"{}\" can't be reached".format( + self._base_url + )) + + def validate_token(self): + try: + # TODO add other possible validations + # - existence of 'user' key in info + # - validate that 'site_id' is in 'sites' in info + self.get_info() + self.get_user() + self._token_is_valid = True + + except UnauthorizedError: + self._token_is_valid = False + return self._token_is_valid + + def set_token(self, token): + self.reset_token() + self._access_token = token + self.get_user() + + def reset_token(self): + self._access_token = None + self._token_is_valid = None + self.close_session() + + def create_session(self): + if self._session is not None: + raise ValueError("Session is already created.") + + self._as_user_stack.clear() + # Validate token before session creation + self.validate_token() + + session = requests.Session() + session.headers.update(self.get_headers()) + + self._session_functions_mapping = { + RequestTypes.get: session.get, + RequestTypes.post: session.post, + RequestTypes.put: session.put, + RequestTypes.patch: session.patch, + RequestTypes.delete: session.delete + } + self._session = session + + def close_session(self): + if self._session is None: + return + + session = self._session + self._session = None + self._session_functions_mapping = {} + session.close() + + def _update_session_headers(self): + if 
self._session is None: + return + + # Header keys that may change over time + for key, value in ( + ("X-as-user", self._as_user_stack.username), + ("x-ayon-version", self._client_version), + ("x-ayon-site-id", self._site_id), + ): + if value is not None: + self._session.headers[key] = value + elif key in self._session.headers: + self._session.headers.pop(key) + + def get_info(self): + """Get information about current used api key. + + By default, the 'info' contains only 'uptime' and 'version'. With + logged user info also contains information about user and machines on + which was logged in. + + Todos: + Use this method for validation of token instead of 'get_user'. + + Returns: + Dict[str, Any]: Information from server. + """ + + response = self.get("info") + return response.data + + def _get_user_info(self): + if self._access_token is None: + return None + + if self._access_token_is_service is not None: + response = self.get("users/me") + return response.data + + self._access_token_is_service = False + response = self.get("users/me") + if response.status == 200: + return response.data + + self._access_token_is_service = True + response = self.get("users/me") + if response.status == 200: + return response.data + + self._access_token_is_service = None + return None + + def get_users(self): + # TODO how to find out if user have permission? 
+ users = self.get("users") + return users.data + + def get_user(self, username=None): + output = None + if username is None: + output = self._get_user_info() + else: + response = self.get("users/{}".format(username)) + if response.status == 200: + output = response.data + + if output is None: + raise UnauthorizedError("User is not authorized.") + return output + + def get_headers(self, content_type=None): + if content_type is None: + content_type = "application/json" + + headers = { + "Content-Type": content_type, + "x-ayon-platform": platform.system().lower(), + "x-ayon-hostname": platform.node(), + } + if self._site_id is not None: + headers["x-ayon-site-id"] = self._site_id + + if self._client_version is not None: + headers["x-ayon-version"] = self._client_version + + if self._access_token: + if self._access_token_is_service: + headers["X-Api-Key"] = self._access_token + username = self._as_user_stack.username + if username: + headers["X-as-user"] = username + else: + headers["Authorization"] = "Bearer {}".format( + self._access_token) + return headers + + def login(self, username, password): + if self.has_valid_token: + try: + user_info = self.get_user() + except UnauthorizedError: + user_info = {} + + current_username = user_info.get("name") + if current_username == username: + self.close_session() + self.create_session() + return + + self.reset_token() + + self.validate_server_availability() + + response = self.post( + "auth/login", + name=username, + password=password + ) + if response.status_code != 200: + _detail = response.data.get("detail") + details = "" + if _detail: + details = " {}".format(_detail) + + raise AuthenticationError("Login failed {}".format(details)) + + self._access_token = response["token"] + + if not self.has_valid_token: + raise AuthenticationError("Invalid credentials") + self.create_session() + + def logout(self, soft=False): + if self._access_token: + if not soft: + self._logout() + self.reset_token() + + def _logout(self): + 
logout_from_server(self._base_url, self._access_token) + + def _do_rest_request(self, function, url, **kwargs): + if self._session is None: + if "headers" not in kwargs: + kwargs["headers"] = self.get_headers() + + if isinstance(function, RequestType): + function = self._base_functions_mapping[function] + + elif isinstance(function, RequestType): + function = self._session_functions_mapping[function] + + try: + response = function(url, **kwargs) + + except ConnectionRefusedError: + new_response = RestApiResponse( + None, + {"detail": "Unable to connect the server. Connection refused"} + ) + except requests.exceptions.ConnectionError: + new_response = RestApiResponse( + None, + {"detail": "Unable to connect the server. Connection error"} + ) + else: + content_type = response.headers.get("Content-Type") + if content_type == "application/json": + try: + new_response = RestApiResponse(response) + except JSONDecodeError: + new_response = RestApiResponse( + None, + { + "detail": "The response is not a JSON: {}".format( + response.text) + } + ) + + elif content_type in ("image/jpeg", "image/png"): + new_response = RestApiResponse(response) + + else: + new_response = RestApiResponse(response) + + self.log.debug("Response {}".format(str(new_response))) + return new_response + + def raw_post(self, entrypoint, **kwargs): + entrypoint = entrypoint.lstrip("/").rstrip("/") + self.log.debug("Executing [POST] {}".format(entrypoint)) + url = "{}/{}".format(self._rest_url, entrypoint) + return self._do_rest_request( + RequestTypes.post, + url, + **kwargs + ) + + def raw_put(self, entrypoint, **kwargs): + entrypoint = entrypoint.lstrip("/").rstrip("/") + self.log.debug("Executing [PUT] {}".format(entrypoint)) + url = "{}/{}".format(self._rest_url, entrypoint) + return self._do_rest_request( + RequestTypes.put, + url, + **kwargs + ) + + def raw_patch(self, entrypoint, **kwargs): + entrypoint = entrypoint.lstrip("/").rstrip("/") + self.log.debug("Executing [PATCH] 
{}".format(entrypoint)) + url = "{}/{}".format(self._rest_url, entrypoint) + return self._do_rest_request( + RequestTypes.patch, + url, + **kwargs + ) + + def raw_get(self, entrypoint, **kwargs): + entrypoint = entrypoint.lstrip("/").rstrip("/") + self.log.debug("Executing [GET] {}".format(entrypoint)) + url = "{}/{}".format(self._rest_url, entrypoint) + return self._do_rest_request( + RequestTypes.get, + url, + **kwargs + ) + + def raw_delete(self, entrypoint, **kwargs): + entrypoint = entrypoint.lstrip("/").rstrip("/") + self.log.debug("Executing [DELETE] {}".format(entrypoint)) + url = "{}/{}".format(self._rest_url, entrypoint) + return self._do_rest_request( + RequestTypes.delete, + url, + **kwargs + ) + + def post(self, entrypoint, **kwargs): + return self.raw_post(entrypoint, json=kwargs) + + def put(self, entrypoint, **kwargs): + return self.raw_put(entrypoint, json=kwargs) + + def patch(self, entrypoint, **kwargs): + return self.raw_patch(entrypoint, json=kwargs) + + def get(self, entrypoint, **kwargs): + return self.raw_get(entrypoint, params=kwargs) + + def delete(self, entrypoint, **kwargs): + return self.raw_delete(entrypoint, params=kwargs) + + def get_event(self, event_id): + """Query full event data by id. + + Events received using event server do not contain full information. To + get the full event information is required to receive it explicitly. + + Args: + event_id (str): Id of event. + + Returns: + Dict[str, Any]: Full event data. + """ + + response = self.get("events/{}".format(event_id)) + response.raise_for_status() + return response.data + + def get_events( + self, + topics=None, + project_names=None, + states=None, + users=None, + include_logs=None, + fields=None + ): + """Get events from server with filtering options. + + Notes: + Not all event happen on a project. + + Args: + topics (Iterable[str]): Name of topics. + project_names (Iterable[str]): Project on which event happened. + states (Iterable[str]): Filtering by states. 
+ users (Iterable[str]): Filtering by users who created/triggered + an event. + include_logs (bool): Query also log events. + fields (Union[Iterable[str], None]): Fields that should be received + for each event. + + Returns: + Generator[Dict[str, Any]]: Available events matching filters. + """ + + filters = {} + if topics is not None: + topics = set(topics) + if not topics: + return + filters["eventTopics"] = list(topics) + + if project_names is not None: + project_names = set(project_names) + if not project_names: + return + filters["projectName"] = list(project_names) + + if states is not None: + states = set(states) + if not states: + return + filters["eventStates"] = list(states) + + if users is not None: + users = set(users) + if not users: + return + filters["eventUsers"] = list(users) + + if include_logs is None: + include_logs = False + filters["includeLogsFilter"] = include_logs + + if not fields: + fields = DEFAULT_EVENT_FIELDS + + query = events_graphql_query(set(fields)) + for attr, filter_value in filters.items(): + query.set_variable_value(attr, filter_value) + + for parsed_data in query.continuous_query(self): + for event in parsed_data["events"]: + yield event + + def update_event( + self, + event_id, + sender=None, + project_name=None, + status=None, + description=None, + summary=None, + payload=None + ): + kwargs = {} + for key, value in ( + ("sender", sender), + ("projectName", project_name), + ("status", status), + ("description", description), + ("summary", summary), + ("payload", payload), + ): + if value is not None: + kwargs[key] = value + response = self.patch( + "events/{}".format(event_id), + **kwargs + ) + response.raise_for_status() + + def dispatch_event( + self, + topic, + sender=None, + event_hash=None, + project_name=None, + username=None, + dependencies=None, + description=None, + summary=None, + payload=None, + finished=True, + store=True, + ): + """Dispatch event to server. 
+
+        Args:
+            topic (str): Event topic used for filtering of listeners.
+            sender (Optional[str]): Sender of event.
+            event_hash (Optional[str]): Event hash.
+            project_name (Optional[str]): Project name.
+            username (Optional[str]): Username which triggered event.
+            dependencies (Optional[list[str]]): List of event id dependencies.
+            description (Optional[str]): Description of event.
+            summary (Optional[dict[str, Any]]): Summary of event that can be used
+                for simple filtering on listeners.
+            payload (Optional[dict[str, Any]]): Full payload of event data with
+                all details.
+            finished (bool): Mark event as finished on dispatch.
+            store (bool): Store event in event queue for possible future
+                processing, otherwise the event is sent only to active listeners.
+        """
+
+        if summary is None:
+            summary = {}
+        if payload is None:
+            payload = {}
+        event_data = {
+            "topic": topic,
+            "sender": sender,
+            "hash": event_hash,
+            "project": project_name,
+            "user": username,
+            "dependencies": dependencies,
+            "description": description,
+            "summary": summary,
+            "payload": payload,
+            "finished": finished,
+            "store": store,
+        }
+        if self.post("events", **event_data):
+            self.log.debug("Dispatched event {}".format(topic))
+            return True
+        self.log.error("Unable to dispatch event {}".format(topic))
+        return False
+
+    def enroll_event_job(
+        self,
+        source_topic,
+        target_topic,
+        sender,
+        description=None,
+        sequential=None
+    ):
+        """Enroll job based on events.
+
+        Enroll will find first unprocessed event with 'source_topic' and will
+        create new event with 'target_topic' for it and return the new event
+        data.
+
+        Use 'sequential' to control that only single target event is created
+        at same time. Creation of new target events is blocked while there is
+        at least one unfinished event with target topic, when set to 'True'.
+        This helps when order of events matter and more than one process using
+        the same target is running at the same time.
+ - Make sure the new event has updated status to '"finished"' status + when you're done with logic + + Target topic should not clash with other processes/services. + + Created target event have 'dependsOn' key where is id of source topic. + + Use-case: + - Service 1 is creating events with topic 'my.leech' + - Service 2 process 'my.leech' and uses target topic 'my.process' + - this service can run on 1..n machines + - all events must be processed in a sequence by their creation + time and only one event can be processed at a time + - in this case 'sequential' should be set to 'True' so only + one machine is actually processing events, but if one goes + down there are other that can take place + - Service 3 process 'my.leech' and uses target topic 'my.discover' + - this service can run on 1..n machines + - order of events is not important + - 'sequential' should be 'False' + + Args: + source_topic (str): Source topic to enroll. + target_topic (str): Topic of dependent event. + sender (str): Identifier of sender (e.g. service name or username). + description (str): Human readable text shown in target event. + sequential (bool): The source topic must be processed in sequence. + + Returns: + Union[None, dict[str, Any]]: None if there is no event matching + filters. Created event with 'target_topic'. 
+ """ + + kwargs = { + "sourceTopic": source_topic, + "targetTopic": target_topic, + "sender": sender, + } + if sequential is not None: + kwargs["sequential"] = sequential + if description is not None: + kwargs["description"] = description + response = self.post("enroll", **kwargs) + if response.status_code == 204: + return None + elif response.status_code >= 400: + self.log.error(response.text) + return None + + return response.data + + def _download_file(self, url, filepath, chunk_size, progress): + dst_directory = os.path.dirname(filepath) + if not os.path.exists(dst_directory): + os.makedirs(dst_directory) + + kwargs = {"stream": True} + if self._session is None: + kwargs["headers"] = self.get_headers() + get_func = self._base_functions_mapping[RequestTypes.get] + else: + get_func = self._session_functions_mapping[RequestTypes.get] + + with open(filepath, "wb") as f_stream: + with get_func(url, **kwargs) as response: + response.raise_for_status() + progress.set_content_size(response.headers["Content-length"]) + for chunk in response.iter_content(chunk_size=chunk_size): + f_stream.write(chunk) + progress.add_transferred_chunk(len(chunk)) + + def download_file(self, endpoint, filepath, chunk_size=None, progress=None): + """Download file from AYON server. + + Endpoint can be full url (must start with 'base_url' of api object). + + Progress object can be used to track download. Can be used when + download happens in thread and other thread want to catch changes over + time. + + Args: + endpoint (str): Endpoint or URL to file that should be downloaded. + filepath (str): Path where file will be downloaded. + chunk_size (int): Size of chunks that are received in single loop. + progress (TransferProgress): Object that gives ability to track + download progress. 
+ """ + + if not chunk_size: + # 1 MB chunk by default + chunk_size = 1024 * 1024 + + if endpoint.startswith(self._base_url): + url = endpoint + else: + endpoint = endpoint.lstrip("/").rstrip("/") + url = "{}/{}".format(self._rest_url, endpoint) + + # Create dummy object so the function does not have to check + # 'progress' variable everywhere + if progress is None: + progress = TransferProgress() + + progress.set_source_url(url) + progress.set_destination_url(filepath) + progress.set_started() + try: + self._download_file(url, filepath, chunk_size, progress) + + except Exception as exc: + progress.set_failed(str(exc)) + raise + + finally: + progress.set_transfer_done() + return progress + + def _upload_file(self, url, filepath, progress): + kwargs = {} + if self._session is None: + kwargs["headers"] = self.get_headers() + post_func = self._base_functions_mapping[RequestTypes.post] + else: + post_func = self._session_functions_mapping[RequestTypes.post] + + with open(filepath, "rb") as stream: + stream.seek(0, io.SEEK_END) + size = stream.tell() + stream.seek(0) + progress.set_content_size(size) + response = post_func(url, data=stream, **kwargs) + response.raise_for_status() + progress.set_transferred_size(size) + + def upload_file(self, endpoint, filepath, progress=None): + """Upload file to server. + + Todos: + Uploading with more detailed progress. + + Args: + endpoint (str): Endpoint or url where file will be uploaded. + filepath (str): Source filepath. + progress (TransferProgress): Object that gives ability to track + upload progress. 
+ """ + + if endpoint.startswith(self._base_url): + url = endpoint + else: + endpoint = endpoint.lstrip("/").rstrip("/") + url = "{}/{}".format(self._rest_url, endpoint) + + # Create dummy object so the function does not have to check + # 'progress' variable everywhere + if progress is None: + progress = TransferProgress() + + progress.set_source_url(filepath) + progress.set_destination_url(url) + progress.set_started() + + try: + self._upload_file(url, filepath, progress) + + except Exception as exc: + progress.set_failed(str(exc)) + raise + + finally: + progress.set_transfer_done() + + def trigger_server_restart(self): + """Trigger server restart. + + Restart may be required when a change of specific value happened on + server. + """ + + result = self.post("system/restart") + if result.status_code != 204: + # TODO add better exception + raise ValueError("Failed to restart server") + + def query_graphql(self, query, variables=None): + """Execute GraphQl query. + + Args: + query (str): GraphQl query string. + variables (Union[None, dict[str, Any]): Variables that can be + used in query. + + Returns: + GraphQlResponse: Response from server. + """ + + data = {"query": query, "variables": variables or {}} + response = self._do_rest_request( + RequestTypes.post, + self._graphl_url, + json=data + ) + response.raise_for_status() + return GraphQlResponse(response) + + def get_graphql_schema(self): + return self.query_graphql(INTROSPECTION_QUERY).data + + def get_server_schema(self): + """Get server schema with info, url paths, components etc. + + Todos: + Cache schema - How to find out it is outdated? + + Returns: + Dict[str, Any]: Full server schema. + """ + + url = "{}/openapi.json".format(self._base_url) + response = self._do_rest_request(RequestTypes.get, url) + if response: + return response.data + return None + + def get_schemas(self): + """Get components schema. + + Name of components does not match entity type names e.g. 'project' is + under 'ProjectModel'. 
We should find out some mapping. Also there + are properties which don't have information about reference to object + e.g. 'config' has just object definition without reference schema. + + Returns: + Dict[str, Any]: Component schemas. + """ + + server_schema = self.get_server_schema() + return server_schema["components"]["schemas"] + + def get_attributes_schema(self, use_cache=True): + if not use_cache: + self.reset_attributes_schema() + + if self._attributes_schema is None: + result = self.get("attributes") + if result.status_code != 200: + raise UnauthorizedError( + "User must be authorized to receive attributes" + ) + self._attributes_schema = result.data + return copy.deepcopy(self._attributes_schema) + + def reset_attributes_schema(self): + self._attributes_schema = None + self._entity_type_attributes_cache = {} + + def set_attribute_config( + self, attribute_name, data, scope, position=None, builtin=False + ): + if position is None: + attributes = self.get("attributes").data["attributes"] + origin_attr = next( + ( + attr for attr in attributes + if attr["name"] == attribute_name + ), + None + ) + if origin_attr: + position = origin_attr["position"] + else: + position = len(attributes) + + response = self.put( + "attributes/{}".format(attribute_name), + data=data, + scope=scope, + position=position, + builtin=builtin + ) + if response.status_code != 204: + # TODO raise different exception + raise ValueError( + "Attribute \"{}\" was not created/updated. {}".format( + attribute_name, response.detail + ) + ) + + self.reset_attributes_schema() + + def remove_attribute_config(self, attribute_name): + """Remove attribute from server. + + This can't be un-done, please use carefully. + + Args: + attribute_name (str): Name of attribute to remove. + """ + + response = self.delete("attributes/{}".format(attribute_name)) + if response.status_code != 204: + # TODO raise different exception + raise ValueError( + "Attribute \"{}\" was not created/updated. 
{}".format( + attribute_name, response.detail + ) + ) + + self.reset_attributes_schema() + + def get_attributes_for_type(self, entity_type): + """Get attribute schemas available for an entity type. + + ``` + # Example attribute schema + { + # Common + "type": "integer", + "title": "Clip Out", + "description": null, + "example": 1, + "default": 1, + # These can be filled based on value of 'type' + "gt": null, + "ge": null, + "lt": null, + "le": null, + "minLength": null, + "maxLength": null, + "minItems": null, + "maxItems": null, + "regex": null, + "enum": null + } + ``` + + Args: + entity_type (str): Entity type for which should be attributes + received. + + Returns: + Dict[str, Dict[str, Any]]: Attribute schemas that are available + for entered entity type. + """ + attributes = self._entity_type_attributes_cache.get(entity_type) + if attributes is None: + attributes_schema = self.get_attributes_schema() + attributes = {} + for attr in attributes_schema["attributes"]: + if entity_type not in attr["scope"]: + continue + attr_name = attr["name"] + attributes[attr_name] = attr["data"] + + self._entity_type_attributes_cache[entity_type] = attributes + + return copy.deepcopy(attributes) + + def get_default_fields_for_type(self, entity_type): + """Default fields for entity type. + + Returns most of commonly used fields from server. + + Args: + entity_type (str): Name of entity type. + + Returns: + set[str]: Fields that should be queried from server. 
+ """ + + attributes = self.get_attributes_for_type(entity_type) + if entity_type == "project": + return DEFAULT_PROJECT_FIELDS | { + "attrib.{}".format(attr) + for attr in attributes + } + + if entity_type == "folder": + return DEFAULT_FOLDER_FIELDS | { + "attrib.{}".format(attr) + for attr in attributes + } + + if entity_type == "task": + return DEFAULT_TASK_FIELDS | { + "attrib.{}".format(attr) + for attr in attributes + } + + if entity_type == "subset": + return DEFAULT_SUBSET_FIELDS | { + "attrib.{}".format(attr) + for attr in attributes + } + + if entity_type == "version": + return DEFAULT_VERSION_FIELDS | { + "attrib.{}".format(attr) + for attr in attributes + } + + if entity_type == "representation": + return ( + DEFAULT_REPRESENTATION_FIELDS + | REPRESENTATION_FILES_FIELDS + | { + "attrib.{}".format(attr) + for attr in attributes + } + ) + + raise ValueError("Unknown entity type \"{}\"".format(entity_type)) + + def get_addons_info(self, details=True): + """Get information about addons available on server. + + Args: + details (bool): Detailed data with information how to get client + code. + """ + + endpoint = "addons" + if details: + endpoint += "?details=1" + response = self.get(endpoint) + response.raise_for_status() + return response.data + + def download_addon_private_file( + self, + addon_name, + addon_version, + filename, + destination_dir, + destination_filename=None, + chunk_size=None, + progress=None, + ): + """Download a file from addon private files. + + This method requires to have authorized token available. Private files + are not under '/api' restpoint. + + Args: + addon_name (str): Addon name. + addon_version (str): Addon version. + filename (str): Filename in private folder on server. + destination_dir (str): Where the file should be downloaded. + destination_filename (str): Name of destination filename. Source + filename is used if not passed. + chunk_size (int): Download chunk size. 
+            progress (TransferProgress): Object that gives ability to track
+                download progress.
+
+        Returns:
+            str: Filepath to downloaded file.
+        """
+
+        if not destination_filename:
+            destination_filename = filename
+        dst_filepath = os.path.join(destination_dir, destination_filename)
+        # Filename can contain "subfolders"
+        dst_dirpath = os.path.dirname(dst_filepath)
+        if not os.path.exists(dst_dirpath):
+            os.makedirs(dst_dirpath)
+
+        url = "{}/addons/{}/{}/private/{}".format(
+            self._base_url,
+            addon_name,
+            addon_version,
+            filename
+        )
+        self.download_file(
+            url, dst_filepath, chunk_size=chunk_size, progress=progress
+        )
+        return dst_filepath
+
+    def get_dependencies_info(self):
+        """Information about dependency packages on server.
+
+        Example data structure:
+            {
+                "packages": [
+                    {
+                        "name": str,
+                        "platform": str,
+                        "checksum": str,
+                        "sources": list[dict[str, Any]],
+                        "supportedAddons": dict[str, str],
+                        "pythonModules": dict[str, str]
+                    }
+                ],
+                "productionPackage": str
+            }
+
+        Returns:
+            dict[str, Any]: Information about dependency packages known for
+                server.
+        """
+
+        result = self.get("dependencies")
+        return result.data
+
+    def update_dependency_info(
+        self,
+        name,
+        platform_name,
+        size,
+        checksum,
+        checksum_algorithm=None,
+        supported_addons=None,
+        python_modules=None,
+        sources=None
+    ):
+        """Update or create dependency package info by its identifiers.
+
+        The endpoint can be used to create or update dependency package.
+
+        Args:
+            name (str): Name of dependency package.
+            platform_name (Literal["windows", "linux", "darwin"]): Platform for
+                which is dependency package targeted.
+            size (int): Size of dependency package in bytes.
+            checksum (str): Checksum of archive file where dependencies are.
+            checksum_algorithm (str): Algorithm used to calculate checksum.
+                By default, is used 'md5' (defined by server).
+            supported_addons (Dict[str, str]): Name of addons for which was the
+                package created ('{"": "", ...}').
+ python_modules (Dict[str, str]): Python modules in dependencies + package ('{"": "", ...}'). + sources (List[Dict[str, Any]]): Information about sources where + dependency package is available. + """ + + kwargs = { + key: value + for key, value in ( + ("checksumAlgorithm", checksum_algorithm), + ("supportedAddons", supported_addons), + ("pythonModules", python_modules), + ("sources", sources), + ) + if value + } + + response = self.put( + "dependencies", + name=name, + platform=platform_name, + size=size, + checksum=checksum, + **kwargs + ) + if response.status not in (200, 201): + raise ServerError("Failed to create/update dependency") + return response.data + + def download_dependency_package( + self, + package_name, + dst_directory, + filename, + platform_name=None, + chunk_size=None, + progress=None, + ): + """Download dependency package from server. + + This method requires to have authorized token available. The package is + only downloaded. + + Args: + package_name (str): Name of package to download. + dst_directory (str): Where the file should be downloaded. + filename (str): Name of destination filename. + platform_name (str): Name of platform for which the dependency + package is targetter. Default value is current platform. + chunk_size (int): Download chunk size. + progress (TransferProgress): Object that gives ability to track + download progress. + + Returns: + str: Filepath to downloaded file. + """ + if platform_name is None: + platform_name = platform.system().lower() + + package_filepath = os.path.join(dst_directory, filename) + self.download_file( + "dependencies/{}/{}".format(package_name, platform_name), + package_filepath, + chunk_size=chunk_size, + progress=progress + ) + return package_filepath + + def upload_dependency_package( + self, filepath, package_name, platform_name=None, progress=None + ): + """Upload dependency package to server. + + Args: + filepath (str): Path to a package file. + package_name (str): Name of package. 
Must be unique. + platform_name (str): For which platform is the package targeted. + progress (Optional[TransferProgress]): Object to keep track about + upload state. + """ + + if platform_name is None: + platform_name = platform.system().lower() + + self.upload_file( + "dependencies/{}/{}".format(package_name, platform_name), + filepath, + progress=progress + ) + + def delete_dependency_package(self, package_name, platform_name=None): + """Remove dependency package for specific platform. + + Args: + package_name (str): Name of package to remove. + platform_name (Optional[str]): Which platform of the package should + be removed. Current platform is used if not passed. + """ + + if platform_name is None: + platform_name = platform.system().lower() + + response = self.delete( + "dependencies/{}/{}".format(package_name, platform_name), + ) + if response.status != 200: + raise ServerError("Failed to delete dependency file") + return response.data + + # Anatomy presets + def get_project_anatomy_presets(self): + """Anatomy presets available on server. + + Content has basic information about presets. Example output: + [ + { + "name": "netflix_VFX", + "primary": false, + "version": "1.0.0" + }, + { + ... + }, + ... + ] + + Returns: + list[dict[str, str]]: Anatomy presets available on server. + """ + + result = self.get("anatomy/presets") + result.raise_for_status() + return result.data.get("presets") or [] + + def get_project_anatomy_preset(self, preset_name=None): + """Anatomy preset values by name. + + Get anatomy preset values by preset name. Primary preset is returned + if preset name is set to 'None'. + + Args: + Union[str, None]: Preset name. + + Returns: + dict[str, Any]: Anatomy preset values. + """ + + if preset_name is None: + preset_name = "_" + result = self.get("anatomy/presets/{}".format(preset_name)) + result.raise_for_status() + return result.data + + def get_project_roots_by_site(self, project_name): + """Root overrides per site name. 
+ + Method is based on logged user and can't be received for any other + user on server. + + Output will contain only roots per site id used by logged user. + + Args: + project_name (str): Name of project. + + Returns: + dict[str, dict[str, str]]: Root values by root name by site id. + """ + + result = self.get("projects/{}/roots".format(project_name)) + result.raise_for_status() + return result.data + + def get_project_roots_for_site(self, project_name, site_id=None): + """Root overrides for site. + + If site id is not passed a site set in current api object is used + instead. + + Args: + project_name (str): Name of project. + site_id (Optional[str]): Id of site for which want to receive + site overrides. + + Returns: + dict[str, str]: Root values by root name or None if + site does not have overrides. + """ + + if site_id is None: + site_id = self.site_id + + if site_id is None: + return {} + roots = self.get_project_roots_by_site(project_name) + return roots.get(site_id, {}) + + def get_addon_settings_schema( + self, addon_name, addon_version, project_name=None + ): + """Sudio/Project settings schema of an addon. + + Project schema may look differently as some enums are based on project + values. + + Args: + addon_name (str): Name of addon. + addon_version (str): Version of addon. + project_name (Union[str, None]): Schema for specific project or + default studio schemas. + + Returns: + dict[str, Any]: Schema of studio/project settings. + """ + + endpoint = "addons/{}/{}/schema".format(addon_name, addon_version) + if project_name: + endpoint += "/{}".format(project_name) + result = self.get(endpoint) + result.raise_for_status() + return result.data + + def get_addon_site_settings_schema(self, addon_name, addon_version): + """Site settings schema of an addon. + + Args: + addon_name (str): Name of addon. + addon_version (str): Version of addon. + + Returns: + dict[str, Any]: Schema of site settings. 
+ """ + + result = self.get("addons/{}/{}/siteSettings/schema".format( + addon_name, addon_version + )) + result.raise_for_status() + return result.data + + def get_addon_studio_settings( + self, + addon_name, + addon_version, + variant=None + ): + """Addon studio settings. + + Receive studio settings for specific version of an addon. + + Args: + addon_name (str): Name of addon. + addon_version (str): Version of addon. + variant (str): Name of settings variant. By default, is used + 'default_settings_variant' passed on init. + + Returns: + dict[str, Any]: Addon settings. + """ + + if variant is None: + variant = self.default_settings_variant + + query_items = {} + if variant: + query_items["variant"] = variant + query = prepare_query_string(query_items) + + result = self.get( + "addons/{}/{}/settings{}".format(addon_name, addon_version, query) + ) + result.raise_for_status() + return result.data + + def get_addon_project_settings( + self, + addon_name, + addon_version, + project_name, + variant=None, + site_id=None, + use_site=True + ): + """Addon project settings. + + Receive project settings for specific version of an addon. The settings + may be with site overrides when enabled. + + Site id is filled with current connection site id if not passed. To + make sure any site id is used set 'use_site' to 'False'. + + Args: + addon_name (str): Name of addon. + addon_version (str): Version of addon. + project_name (str): Name of project for which the settings are + received. + variant (str): Name of settings variant. By default, is used + 'production'. + site_id (str): Name of site which is used for site overrides. Is + filled with connection 'site_id' attribute if not passed. + use_site (bool): To force disable option of using site overrides + set to 'False'. In that case won't be applied any site + overrides. + + Returns: + dict[str, Any]: Addon settings. 
+ """ + + if not use_site: + site_id = None + elif not site_id: + site_id = self.site_id + + query_items = {} + if site_id: + query_items["site"] = site_id + + if variant is None: + variant = self.default_settings_variant + + if variant: + query_items["variant"] = variant + + query = prepare_query_string(query_items) + result = self.get( + "addons/{}/{}/settings/{}{}".format( + addon_name, addon_version, project_name, query + ) + ) + result.raise_for_status() + return result.data + + def get_addon_settings( + self, + addon_name, + addon_version, + project_name=None, + variant=None, + site_id=None, + use_site=True + ): + """Receive addon settings. + + Receive addon settings based on project name value. Some arguments may + be ignored if 'project_name' is set to 'None'. + + Args: + addon_name (str): Name of addon. + addon_version (str): Version of addon. + project_name (str): Name of project for which the settings are + received. A studio settings values are received if is 'None'. + variant (str): Name of settings variant. By default, is used + 'production'. + site_id (str): Name of site which is used for site overrides. Is + filled with connection 'site_id' attribute if not passed. + use_site (bool): To force disable option of using site overrides + set to 'False'. In that case won't be applied any site + overrides. + + Returns: + dict[str, Any]: Addon settings. + """ + + if project_name is None: + return self.get_addon_studio_settings( + addon_name, addon_version, variant + ) + return self.get_addon_project_settings( + addon_name, addon_version, project_name, variant, site_id, use_site + ) + + def get_addon_site_settings( + self, addon_name, addon_version, site_id=None + ): + """Site settings of an addon. + + If site id is not available an empty dictionary is returned. + + Args: + addon_name (str): Name of addon. + addon_version (str): Version of addon. + site_id (str): Name of site for which should be settings returned. + using 'site_id' attribute if not passed. 
+ + Returns: + dict[str, Any]: Site settings. + """ + + if site_id is None: + site_id = self.site_id + + if not site_id: + return {} + + query = prepare_query_string({"site": site_id}) + result = self.get("addons/{}/{}/siteSettings{}".format( + addon_name, addon_version, query + )) + result.raise_for_status() + return result.data + + def get_addons_studio_settings(self, variant=None, only_values=True): + """All addons settings in one bulk. + + Args: + variant (Literal[production, staging]): Variant of settings. By + default, is used 'production'. + only_values (Optional[bool]): Output will contain only settings + values without metadata about addons. + + Returns: + dict[str, Any]: Settings of all addons on server. + """ + + query_values = {} + if variant: + query_values["variant"] = variant + query = prepare_query_string(query_values) + response = self.get("settings/addons{}".format(query)) + response.raise_for_status() + output = response.data + if only_values: + output = output["settings"] + return output + + def get_addons_project_settings( + self, + project_name, + variant=None, + site_id=None, + use_site=True, + only_values=True + ): + """Project settings of all addons. + + Server returns information about used addon versions, so full output + looks like: + { + "settings": {...}, + "addons": {...} + } + + The output can be limited to only values. To do so is 'only_values' + argument which is by default set to 'True'. In that case output + contains only value of 'settings' key. + + Args: + project_name (str): Name of project for which are settings + received. + variant (Optional[Literal[production, staging]]): Variant of + settings. By default, is used 'production'. + site_id (Optional[str]): Id of site for which want to receive + site overrides. + use_site (bool): To force disable option of using site overrides + set to 'False'. In that case won't be applied any site + overrides. 
+ only_values (Optional[bool]): Output will contain only settings + values without metadata about addons. + + Returns: + dict[str, Any]: Settings of all addons on server for passed + project. + """ + + query_values = { + "project": project_name + } + if variant: + query_values["variant"] = variant + + if use_site: + if not site_id: + site_id = self.default_settings_variant + if site_id: + query_values["site"] = site_id + query = prepare_query_string(query_values) + response = self.get("settings/addons{}".format(query)) + response.raise_for_status() + output = response.data + if only_values: + output = output["settings"] + return output + + def get_addons_settings( + self, + project_name=None, + variant=None, + site_id=None, + use_site=True, + only_values=True + ): + """Universal function to receive all addon settings. + + Based on 'project_name' will receive studio settings or project + settings. In case project is not passed is 'site_id' ignored. + + Args: + project_name (Optional[str]): Name of project for which should be + settings received. + variant (Optional[Literal[production, staging]]): Settings variant. + By default, is used 'production'. + site_id (Optional[str]): Id of site for which want to receive + site overrides. + use_site (bool): To force disable option of using site overrides + set to 'False'. In that case won't be applied any site + overrides. + only_values (Optional[bool]): Only settings values will be + returned. By default, is set to 'True'. + """ + + if project_name is None: + return self.get_addons_studio_settings(variant, only_values) + + return self.get_addons_project_settings( + project_name, variant, site_id, use_site, only_values + ) + + # Entity getters + def get_rest_project(self, project_name): + """Query project by name. + + This call returns project with anatomy data. + + Args: + project_name (str): Name of project. + + Returns: + Union[Dict[str, Any], None]: Project entity data or 'None' if + project was not found. 
+ """ + + if not project_name: + return None + + response = self.get("projects/{}".format(project_name)) + if response.status == 200: + return response.data + return None + + def get_rest_projects(self, active=True, library=None): + """Query available project entities. + + User must be logged in. + + Args: + active (Union[bool, None]): Filter active/inactive projects. Both + are returned if 'None' is passed. + library (bool): Filter standard/library projects. Both are + returned if 'None' is passed. + + Returns: + Generator[Dict[str, Any]]: Available projects. + """ + + for project_name in self.get_project_names(active, library): + project = self.get_rest_project(project_name) + if project: + yield project + + def get_rest_entity_by_id(self, project_name, entity_type, entity_id): + """Get entity using REST on a project by its id. + + Args: + project_name (str): Name of project where entity is. + entity_type (Literal["folder", "task", "subset", "version"]): The + entity type which should be received. + entity_id (str): Id of entity. + + Returns: + dict[str, Any]: Received entity data. 
+ """ + + if not all((project_name, entity_type, entity_id)): + return None + + entity_endpoint = "{}s".format(entity_type) + response = self.get("projects/{}/{}/{}".format( + project_name, entity_endpoint, entity_id + )) + if response.status == 200: + return response.data + return None + + def get_rest_folder(self, project_name, folder_id): + return self.get_rest_entity_by_id(project_name, "folder", folder_id) + + def get_rest_task(self, project_name, task_id): + return self.get_rest_entity_by_id(project_name, "task", task_id) + + def get_rest_subset(self, project_name, subset_id): + return self.get_rest_entity_by_id(project_name, "subset", subset_id) + + def get_rest_version(self, project_name, version_id): + return self.get_rest_entity_by_id(project_name, "version", version_id) + + def get_rest_representation(self, project_name, representation_id): + return self.get_rest_entity_by_id( + project_name, "representation", representation_id + ) + + def get_project_names(self, active=True, library=None): + """Receive available project names. + + User must be logged in. + + Args: + active (Union[bool, None[): Filter active/inactive projects. Both + are returned if 'None' is passed. + library (bool): Filter standard/library projects. Both are + returned if 'None' is passed. + + Returns: + List[str]: List of available project names. 
+ """ + + query_keys = {} + if active is not None: + query_keys["active"] = "true" if active else "false" + + if library is not None: + query_keys["library"] = "true" if active else "false" + query = "" + if query_keys: + query = "?{}".format(",".join([ + "{}={}".format(key, value) + for key, value in query_keys.items() + ])) + + response = self.get("projects{}".format(query), **query_keys) + response.raise_for_status() + data = response.data + project_names = [] + if data: + for project in data["projects"]: + project_names.append(project["name"]) + return project_names + + def get_projects( + self, active=True, library=None, fields=None, own_attributes=False + ): + """Get projects. + + Args: + active (Union[bool, None]): Filter active or inactive projects. + Filter is disabled when 'None' is passed. + library (Union[bool, None]): Filter library projects. Filter is + disabled when 'None' is passed. + fields (Union[Iterable[str], None]): fields to be queried + for project. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Generator[Dict[str, Any]]: Queried projects. + """ + + if fields is None: + use_rest = True + else: + use_rest = False + fields = set(fields) + if own_attributes: + fields.add("ownAttrib") + for field in fields: + if field.startswith("config"): + use_rest = True + break + + if use_rest: + for project in self.get_rest_projects(active, library): + if own_attributes: + fill_own_attribs(project) + yield project + + else: + query = projects_graphql_query(fields) + for parsed_data in query.continuous_query(self): + for project in parsed_data["projects"]: + if own_attributes: + fill_own_attribs(project) + yield project + + def get_project(self, project_name, fields=None, own_attributes=False): + """Get project. + + Args: + project_name (str): Name of project. + fields (Union[Iterable[str], None]): fields to be queried + for project. 
+ own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[Dict[str, Any], None]: Project entity data or None + if project was not found. + """ + + use_rest = True + if fields is not None: + use_rest = False + _fields = set() + for field in fields: + if field.startswith("config") or field == "data": + use_rest = True + break + _fields.add(field) + + fields = _fields + + if use_rest: + project = self.get_rest_project(project_name) + if own_attributes: + fill_own_attribs(project) + return project + + if own_attributes: + field.add("ownAttrib") + query = project_graphql_query(fields) + query.set_variable_value("projectName", project_name) + + parsed_data = query.query(self) + + project = parsed_data["project"] + if project is not None: + project["name"] = project_name + if own_attributes: + fill_own_attribs(project) + return project + + def get_folders( + self, + project_name, + folder_ids=None, + folder_paths=None, + folder_names=None, + parent_ids=None, + active=True, + fields=None, + own_attributes=False + ): + """Query folders from server. + + Todos: + Folder name won't be unique identifier so we should add folder path + filtering. + + Notes: + Filter 'active' don't have direct filter in GraphQl. + + Args: + project_name (str): Name of project. + folder_ids (Iterable[str]): Folder ids to filter. + folder_paths (Iterable[str]): Folder paths used for filtering. + folder_names (Iterable[str]): Folder names used for filtering. + parent_ids (Iterable[str]): Ids of folder parents. Use 'None' + if folder is direct child of project. + active (Union[bool, None]): Filter active/inactive folders. + Both are returned if is set to None. + fields (Union[Iterable[str], None]): Fields to be queried for + folder. All possible folder fields are returned + if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. 
+ + Returns: + Generator[dict[str, Any]]: Queried folder entities. + """ + + if not project_name: + return + + filters = { + "projectName": project_name + } + if folder_ids is not None: + folder_ids = set(folder_ids) + if not folder_ids: + return + filters["folderIds"] = list(folder_ids) + + if folder_paths is not None: + folder_paths = set(folder_paths) + if not folder_paths: + return + filters["folderPaths"] = list(folder_paths) + + if folder_names is not None: + folder_names = set(folder_names) + if not folder_names: + return + filters["folderNames"] = list(folder_names) + + if parent_ids is not None: + parent_ids = set(parent_ids) + if not parent_ids: + return + if None in parent_ids: + # Replace 'None' with '"root"' which is used during GraphQl + # query for parent ids filter for folders without folder + # parent + parent_ids.remove(None) + parent_ids.add("root") + + if project_name in parent_ids: + # Replace project name with '"root"' which is used during + # GraphQl query for parent ids filter for folders without + # folder parent + parent_ids.remove(project_name) + parent_ids.add("root") + + filters["parentFolderIds"] = list(parent_ids) + + if fields: + fields = set(fields) + else: + fields = self.get_default_fields_for_type("folder") + + use_rest = False + if "data" in fields: + use_rest = True + fields = {"id"} + + if active is not None: + fields.add("active") + + if own_attributes and not use_rest: + fields.add("ownAttrib") + + query = folders_graphql_query(fields) + for attr, filter_value in filters.items(): + query.set_variable_value(attr, filter_value) + + for parsed_data in query.continuous_query(self): + for folder in parsed_data["project"]["folders"]: + if active is not None and active is not folder["active"]: + continue + + if use_rest: + folder = self.get_rest_folder(project_name, folder["id"]) + + if own_attributes: + fill_own_attribs(folder) + yield folder + + def get_tasks( + self, + project_name, + task_ids=None, + task_names=None, + 
task_types=None, + folder_ids=None, + active=True, + fields=None, + own_attributes=False + ): + """Query task entities from server. + + Args: + project_name (str): Name of project. + task_ids (Iterable[str]): Task ids to filter. + task_names (Iterable[str]): Task names used for filtering. + task_types (Itartable[str]): Task types used for filtering. + folder_ids (Iterable[str]): Ids of task parents. Use 'None' + if folder is direct child of project. + active (Union[bool, None]): Filter active/inactive tasks. + Both are returned if is set to None. + fields (Union[Iterable[str], None]): Fields to be queried for + folder. All possible folder fields are returned + if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Generator[dict[str, Any]]: Queried task entities. + """ + + if not project_name: + return + + filters = { + "projectName": project_name + } + + if task_ids is not None: + task_ids = set(task_ids) + if not task_ids: + return + filters["taskIds"] = list(task_ids) + + if task_names is not None: + task_names = set(task_names) + if not task_names: + return + filters["taskNames"] = list(task_names) + + if task_types is not None: + task_types = set(task_types) + if not task_types: + return + filters["taskTypes"] = list(task_types) + + if folder_ids is not None: + folder_ids = set(folder_ids) + if not folder_ids: + return + filters["folderIds"] = list(folder_ids) + + if not fields: + fields = self.get_default_fields_for_type("task") + + fields = set(fields) + + use_rest = False + if "data" in fields: + use_rest = True + fields = {"id"} + + if active is not None: + fields.add("active") + + if own_attributes: + fields.add("ownAttrib") + + query = tasks_graphql_query(fields) + for attr, filter_value in filters.items(): + query.set_variable_value(attr, filter_value) + + for parsed_data in query.continuous_query(self): + for task in parsed_data["project"]["tasks"]: + if active is 
not None and active is not task["active"]: + continue + + if use_rest: + task = self.get_rest_task(project_name, task["id"]) + + if own_attributes: + fill_own_attribs(task) + yield task + + def get_task_by_name( + self, + project_name, + folder_id, + task_name, + fields=None, + own_attributes=False + ): + """Query task entity by name and folder id. + + Args: + project_name (str): Name of project where to look for queried + entities. + folder_id (str): Folder id. + task_name (str): Task name + fields (Union[Iterable[str], None]): Fields that should be returned. + All fields are returned if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict, None]: Task entity data or None if was not found. + """ + + for task in self.get_tasks( + project_name, + folder_ids=[folder_id], + task_names=[task_name], + active=None, + fields=fields, + own_attributes=own_attributes + ): + return task + return None + + def get_task_by_id( + self, + project_name, + task_id, + fields=None, + own_attributes=False + ): + """Query task entity by id. + + Args: + project_name (str): Name of project where to look for queried + entities. + task_id (str): Task id. + fields (Union[Iterable[str], None]): Fields that should be returned. + All fields are returned if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict, None]: Task entity data or None if was not found. + """ + + for task in self.get_tasks( + project_name, + task_ids=[task_id], + active=None, + fields=fields, + own_attributes=own_attributes + ): + return task + return None + + + def get_folder_by_id( + self, + project_name, + folder_id, + fields=None, + own_attributes=False + ): + """Query folder entity by id. + + Args: + project_name (str): Name of project where to look for queried + entities. + folder_id (str): Folder id. 
+ fields (Union[Iterable[str], None]): Fields that should be returned. + All fields are returned if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict, None]: Folder entity data or None if was not found. + """ + + folders = self.get_folders( + project_name, + folder_ids=[folder_id], + active=None, + fields=fields, + own_attributes=own_attributes + ) + for folder in folders: + return folder + return None + + def get_folder_by_path( + self, + project_name, + folder_path, + fields=None, + own_attributes=False + ): + """Query folder entity by path. + + Folder path is a path to folder with all parent names joined by slash. + + Args: + project_name (str): Name of project where to look for queried + entities. + folder_path (str): Folder path. + fields (Union[Iterable[str], None]): Fields that should be returned. + All fields are returned if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict, None]: Folder entity data or None if was not found. + """ + + folders = self.get_folders( + project_name, + folder_paths=[folder_path], + active=None, + fields=fields, + own_attributes=own_attributes + ) + for folder in folders: + return folder + return None + + def get_folder_by_name( + self, + project_name, + folder_name, + fields=None, + own_attributes=False + ): + """Query folder entity by path. + + Warnings: + Folder name is not a unique identifier of a folder. Function is + kept for OpenPype 3 compatibility. + + Args: + project_name (str): Name of project where to look for queried + entities. + folder_name (str): Folder name. + fields (Union[Iterable[str], None]): Fields that should be returned. + All fields are returned if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. 
+ + Returns: + Union[dict, None]: Folder entity data or None if was not found. + """ + + folders = self.get_folders( + project_name, + folder_names=[folder_name], + active=None, + fields=fields, + own_attributes=own_attributes + ) + for folder in folders: + return folder + return None + + def get_folder_ids_with_subsets(self, project_name, folder_ids=None): + """Find folders which have at least one subset. + + Folders that have at least one subset should be immutable, so they + should not change path -> change of name or name of any parent + is not possible. + + Args: + project_name (str): Name of project. + folder_ids (Union[Iterable[str], None]): Limit folder ids filtering + to a set of folders. If set to None all folders on project are + checked. + + Returns: + set[str]: Folder ids that have at least one subset. + """ + + if folder_ids is not None: + folder_ids = set(folder_ids) + if not folder_ids: + return set() + + query = folders_graphql_query({"id"}) + query.set_variable_value("projectName", project_name) + query.set_variable_value("folderHasSubsets", True) + if folder_ids: + query.set_variable_value("folderIds", list(folder_ids)) + + parsed_data = query.query(self) + folders = parsed_data["project"]["folders"] + return { + folder["id"] + for folder in folders + } + + def _filter_subset( + self, project_name, subset, active, own_attributes, use_rest + ): + if active is not None and subset["active"] is not active: + return None + + if use_rest: + subset = self.get_rest_subset(project_name, subset["id"]) + + if own_attributes: + fill_own_attribs(subset) + + return subset + + def get_subsets( + self, + project_name, + subset_ids=None, + subset_names=None, + folder_ids=None, + names_by_folder_ids=None, + active=True, + fields=None, + own_attributes=False + ): + """Query subsets from server. + + Todos: + Separate 'name_by_folder_ids' filtering to separated method. It + cannot be combined with some other filters. + + Args: + project_name (str): Name of project. 
+ subset_ids (Iterable[str]): Task ids to filter. + subset_names (Iterable[str]): Task names used for filtering. + folder_ids (Iterable[str]): Ids of task parents. Use 'None' + if folder is direct child of project. + names_by_folder_ids (dict[str, Iterable[str]]): Subset name + filtering by folder id. + active (Union[bool, None]): Filter active/inactive subsets. + Both are returned if is set to None. + fields (Union[Iterable[str], None]): Fields to be queried for + folder. All possible folder fields are returned + if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Generator[dict[str, Any]]: Queried subset entities. + """ + + if not project_name: + return + + if subset_ids is not None: + subset_ids = set(subset_ids) + if not subset_ids: + return + + filter_subset_names = None + if subset_names is not None: + filter_subset_names = set(subset_names) + if not filter_subset_names: + return + + filter_folder_ids = None + if folder_ids is not None: + filter_folder_ids = set(folder_ids) + if not filter_folder_ids: + return + + # This will disable 'folder_ids' and 'subset_names' filters + # - maybe could be enhanced in future? 
+ if names_by_folder_ids is not None: + filter_subset_names = set() + filter_folder_ids = set() + + for folder_id, names in names_by_folder_ids.items(): + if folder_id and names: + filter_folder_ids.add(folder_id) + filter_subset_names |= set(names) + + if not filter_subset_names or not filter_folder_ids: + return + + # Convert fields and add minimum required fields + if fields: + fields = set(fields) | {"id"} + else: + fields = self.get_default_fields_for_type("subset") + + use_rest = False + if "data" in fields: + use_rest = True + fields = {"id"} + + if active is not None: + fields.add("active") + + if own_attributes: + fields.add("ownAttrib") + + # Add 'name' and 'folderId' if 'names_by_folder_ids' filter is entered + if names_by_folder_ids: + fields.add("name") + fields.add("folderId") + + # Prepare filters for query + filters = { + "projectName": project_name + } + if filter_folder_ids: + filters["folderIds"] = list(filter_folder_ids) + + if subset_ids: + filters["subsetIds"] = list(subset_ids) + + if filter_subset_names: + filters["subsetNames"] = list(filter_subset_names) + + query = subsets_graphql_query(fields) + for attr, filter_value in filters.items(): + query.set_variable_value(attr, filter_value) + + parsed_data = query.query(self) + + subsets = parsed_data.get("project", {}).get("subsets", []) + # Filter subsets by 'names_by_folder_ids' + if names_by_folder_ids: + subsets_by_folder_id = collections.defaultdict(list) + for subset in subsets: + filtered_subset = self._filter_subset( + project_name, subset, active, own_attributes, use_rest + ) + if filtered_subset is not None: + folder_id = filtered_subset["folderId"] + subsets_by_folder_id[folder_id].append(filtered_subset) + + for folder_id, names in names_by_folder_ids.items(): + for folder_subset in subsets_by_folder_id[folder_id]: + if folder_subset["name"] in names: + yield folder_subset + + else: + for subset in subsets: + filtered_subset = self._filter_subset( + project_name, subset, active, 
own_attributes, use_rest + ) + if filtered_subset is not None: + yield filtered_subset + + + def get_subset_by_id( + self, + project_name, + subset_id, + fields=None, + own_attributes=False + ): + """Query subset entity by id. + + Args: + project_name (str): Name of project where to look for queried + entities. + subset_id (str): Subset id. + fields (Union[Iterable[str], None]): Fields that should be returned. + All fields are returned if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict, None]: Subset entity data or None if was not found. + """ + + subsets = self.get_subsets( + project_name, + subset_ids=[subset_id], + active=None, + fields=fields, + own_attributes=own_attributes + ) + for subset in subsets: + return subset + return None + + def get_subset_by_name( + self, + project_name, + subset_name, + folder_id, + fields=None, + own_attributes=False + ): + """Query subset entity by name and folder id. + + Args: + project_name (str): Name of project where to look for queried + entities. + subset_name (str): Subset name. + folder_id (str): Folder id (Folder is a parent of subsets). + fields (Union[Iterable[str], None]): Fields that should be returned. + All fields are returned if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict, None]: Subset entity data or None if was not found. + """ + + subsets = self.get_subsets( + project_name, + subset_names=[subset_name], + folder_ids=[folder_id], + active=None, + fields=fields, + own_attributes=own_attributes + ) + for subset in subsets: + return subset + return None + + def get_subset_families(self, project_name, subset_ids=None): + """Families of subsets from a project. + + Args: + project_name (str): Name of project where to look for queried + entities. 
+ subset_ids (Union[None, Iterable[str]]): Limit filtering to set + of subset ids. + + Returns: + set[str]: Families found on subsets. + """ + + if subset_ids is not None: + subsets = self.get_subsets( + project_name, + subset_ids=subset_ids, + fields=["family"], + active=None, + ) + return { + subset["family"] + for subset in subsets + } + + query = GraphQlQuery("SubsetFamilies") + project_name_var = query.add_variable( + "projectName", "String!", project_name + ) + project_query = query.add_field("project") + project_query.set_filter("name", project_name_var) + project_query.add_field("subsetFamilies") + + parsed_data = query.query(self) + + return set(parsed_data.get("project", {}).get("subsetFamilies", [])) + + def get_versions( + self, + project_name, + version_ids=None, + subset_ids=None, + versions=None, + hero=True, + standard=True, + latest=None, + active=True, + fields=None, + own_attributes=False + ): + """Get version entities based on passed filters from server. + + Args: + project_name (str): Name of project where to look for versions. + version_ids (Iterable[str]): Version ids used for version + filtering. + subset_ids (Iterable[str]): Subset ids used for version filtering. + versions (Iterable[int]): Versions we're interested in. + hero (bool): Receive also hero versions when set to true. + standard (bool): Receive versions which are not hero when + set to true. + latest (bool): Return only latest version of standard versions. + This can be combined only with 'standard' attribute + set to True. + active (Union[bool, None]): Receive active/inactive entities. + Both are returned when 'None' is passed. + fields (Union[Iterable[str], None]): Fields to be queried + for version. All possible folder fields are returned + if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Generator[Dict[str, Any]]: Queried version entities. 
+ """ + + if not fields: + fields = self.get_default_fields_for_type("version") + fields = set(fields) + + if active is not None: + fields.add("active") + + # Make sure fields have minimum required fields + fields |= {"id", "version"} + + use_rest = False + if "data" in fields: + use_rest = True + fields = {"id"} + + if own_attributes: + fields.add("ownAttrib") + + filters = { + "projectName": project_name + } + if version_ids is not None: + version_ids = set(version_ids) + if not version_ids: + return + filters["versionIds"] = list(version_ids) + + if subset_ids is not None: + subset_ids = set(subset_ids) + if not subset_ids: + return + filters["subsetIds"] = list(subset_ids) + + # TODO versions can't be used as fitler at this moment! + if versions is not None: + versions = set(versions) + if not versions: + return + filters["versions"] = list(versions) + + if not hero and not standard: + return + + queries = [] + # Add filters based on 'hero' and 'standard' + # NOTE: There is not a filter to "ignore" hero versions or to get + # latest and hero version + # - if latest and hero versions should be returned it must be done in + # 2 graphql queries + if standard and not latest: + # This query all versions standard + hero + # - hero must be filtered out if is not enabled during loop + query = versions_graphql_query(fields) + for attr, filter_value in filters.items(): + query.set_variable_value(attr, filter_value) + queries.append(query) + else: + if hero: + # Add hero query if hero is enabled + hero_query = versions_graphql_query(fields) + for attr, filter_value in filters.items(): + hero_query.set_variable_value(attr, filter_value) + + hero_query.set_variable_value("heroOnly", True) + queries.append(hero_query) + + if standard: + standard_query = versions_graphql_query(fields) + for attr, filter_value in filters.items(): + standard_query.set_variable_value(attr, filter_value) + + if latest: + standard_query.set_variable_value("latestOnly", True) + 
queries.append(standard_query) + + for query in queries: + for parsed_data in query.continuous_query(self): + for version in parsed_data["project"]["versions"]: + if active is not None and version["active"] is not active: + continue + + if not hero and version["version"] < 0: + continue + + if use_rest: + version = self.get_rest_version( + project_name, version["id"] + ) + + if own_attributes: + fill_own_attribs(version) + + yield version + + def get_version_by_id( + self, + project_name, + version_id, + fields=None, + own_attributes=False + ): + """Query version entity by id. + + Args: + project_name (str): Name of project where to look for queried + entities. + version_id (str): Version id. + fields (Union[Iterable[str], None]): Fields that should be returned. + All fields are returned if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict, None]: Version entity data or None if was not found. + """ + + versions = self.get_versions( + project_name, + version_ids=[version_id], + active=None, + hero=True, + fields=fields, + own_attributes=own_attributes + ) + for version in versions: + return version + return None + + def get_version_by_name( + self, + project_name, + version, + subset_id, + fields=None, + own_attributes=False + ): + """Query version entity by version and subset id. + + Args: + project_name (str): Name of project where to look for queried + entities. + version (int): Version of version entity. + subset_id (str): Subset id. Subset is a parent of version. + fields (Union[Iterable[str], None]): Fields that should be returned. + All fields are returned if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict, None]: Version entity data or None if was not found. 
+ """ + + versions = self.get_versions( + project_name, + subset_ids=[subset_id], + versions=[version], + active=None, + fields=fields, + own_attributes=own_attributes + ) + for version in versions: + return version + return None + + def get_hero_version_by_id( + self, + project_name, + version_id, + fields=None, + own_attributes=False + ): + """Query hero version entity by id. + + Args: + project_name (str): Name of project where to look for queried + entities. + version_id (int): Hero version id. + fields (Union[Iterable[str], None]): Fields that should be returned. + All fields are returned if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict, None]: Version entity data or None if was not found. + """ + + versions = self.get_hero_versions( + project_name, + version_ids=[version_id], + fields=fields, + own_attributes=own_attributes + ) + for version in versions: + return version + return None + + def get_hero_version_by_subset_id( + self, + project_name, + subset_id, + fields=None, + own_attributes=False + ): + """Query hero version entity by subset id. + + Only one hero version is available on a subset. + + Args: + project_name (str): Name of project where to look for queried + entities. + subset_id (int): Subset id. + fields (Union[Iterable[str], None]): Fields that should be returned. + All fields are returned if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict, None]: Version entity data or None if was not found. 
+ """ + + versions = self.get_hero_versions( + project_name, + subset_ids=[subset_id], + fields=fields, + own_attributes=own_attributes + ) + for version in versions: + return version + return None + + def get_hero_versions( + self, + project_name, + subset_ids=None, + version_ids=None, + active=True, + fields=None, + own_attributes=False + ): + """Query hero versions by multiple filters. + + Only one hero version is available on a subset. + + Args: + project_name (str): Name of project where to look for queried + entities. + subset_ids (int): Subset ids. + version_ids (int): Version ids. + active (Union[bool, None]): Receive active/inactive entities. + Both are returned when 'None' is passed. + fields (Union[Iterable[str], None]): Fields that should be returned. + All fields are returned if 'None' is passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict, None]: Version entity data or None if was not found. + """ + + return self.get_versions( + project_name, + version_ids=version_ids, + subset_ids=subset_ids, + hero=True, + standard=False, + active=active, + fields=fields, + own_attributes=own_attributes + ) + + def get_last_versions( + self, + project_name, + subset_ids, + active=True, + fields=None, + own_attributes=False + ): + """Query last version entities by subset ids. + + Args: + project_name (str): Project where to look for representation. + subset_ids (Iterable[str]): Subset ids. + active (Union[bool, None]): Receive active/inactive entities. + Both are returned when 'None' is passed. + fields (Union[Iterable[str], None]): fields to be queried + for representations. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + dict[str, dict[str, Any]]: Last versions by subset id. 
+ """ + + versions = self.get_versions( + project_name, + subset_ids=subset_ids, + latest=True, + active=active, + fields=fields, + own_attributes=own_attributes + ) + return { + version["parent"]: version + for version in versions + } + + def get_last_version_by_subset_id( + self, + project_name, + subset_id, + active=True, + fields=None, + own_attributes=False + ): + """Query last version entity by subset id. + + Args: + project_name (str): Project where to look for representation. + subset_id (str): Subset id. + active (Union[bool, None]): Receive active/inactive entities. + Both are returned when 'None' is passed. + fields (Union[Iterable[str], None]): fields to be queried + for representations. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict[str, Any], None]: Queried version entity or None. + """ + + versions = self.get_versions( + project_name, + subset_ids=[subset_id], + latest=True, + active=active, + fields=fields, + own_attributes=own_attributes + ) + for version in versions: + return version + return None + + def get_last_version_by_subset_name( + self, + project_name, + subset_name, + folder_id, + active=True, + fields=None, + own_attributes=False + ): + """Query last version entity by subset name and folder id. + + Args: + project_name (str): Project where to look for representation. + subset_name (str): Subset name. + folder_id (str): Folder id. + active (Union[bool, None]): Receive active/inactive entities. + Both are returned when 'None' is passed. + fields (Union[Iterable[str], None]): fields to be queried + for representations. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict[str, Any], None]: Queried version entity or None. 
+ """ + + if not folder_id: + return None + + subset = self.get_subset_by_name( + project_name, subset_name, folder_id, fields=["_id"] + ) + if not subset: + return None + return self.get_last_version_by_subset_id( + project_name, + subset["id"], + active=active, + fields=fields, + own_attributes=own_attributes + ) + + def version_is_latest(self, project_name, version_id): + """Is version latest from a subset. + + Args: + project_name (str): Project where to look for representation. + version_id (str): Version id. + + Returns: + bool: Version is latest or not. + """ + + query = GraphQlQuery("VersionIsLatest") + project_name_var = query.add_variable( + "projectName", "String!", project_name + ) + version_id_var = query.add_variable( + "versionId", "String!", version_id + ) + project_query = query.add_field("project") + project_query.set_filter("name", project_name_var) + version_query = project_query.add_field("version") + version_query.set_filter("id", version_id_var) + subset_query = version_query.add_field("subset") + latest_version_query = subset_query.add_field("latestVersion") + latest_version_query.add_field("id") + + parsed_data = query.query(self) + latest_version = ( + parsed_data["project"]["version"]["subset"]["latestVersion"] + ) + return latest_version["id"] == version_id + + def get_representations( + self, + project_name, + representation_ids=None, + representation_names=None, + version_ids=None, + names_by_version_ids=None, + active=True, + fields=None, + own_attributes=False + ): + """Get representation entities based on passed filters from server. + + Todos: + Add separated function for 'names_by_version_ids' filtering. + Because can't be combined with others. + + Args: + project_name (str): Name of project where to look for versions. + representation_ids (Iterable[str]): Representation ids used for + representation filtering. + representation_names (Iterable[str]): Representation names used for + representation filtering. 
+ version_ids (Iterable[str]): Version ids used for + representation filtering. Versions are parents of + representations. + names_by_version_ids (bool): Find representations by names and + version ids. This filter discard all other filters. + active (Union[bool, None]): Receive active/inactive entities. + Both are returned when 'None' is passed. + fields (Union[Iterable[str], None]): Fields to be queried for + representation. All possible fields are returned if 'None' is + passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Generator[Dict[str, Any]]: Queried representation entities. + """ + + if not fields: + fields = self.get_default_fields_for_type("representation") + fields = set(fields) + + use_rest = False + if "data" in fields: + use_rest = True + fields = {"id"} + + if active is not None: + fields.add("active") + + if own_attributes: + fields.add("ownAttrib") + + filters = { + "projectName": project_name + } + + if representation_ids is not None: + representation_ids = set(representation_ids) + if not representation_ids: + return + filters["representationIds"] = list(representation_ids) + + version_ids_filter = None + representaion_names_filter = None + if names_by_version_ids is not None: + version_ids_filter = set() + representaion_names_filter = set() + for version_id, names in names_by_version_ids.items(): + version_ids_filter.add(version_id) + representaion_names_filter |= set(names) + + if not version_ids_filter or not representaion_names_filter: + return + + else: + if representation_names is not None: + representaion_names_filter = set(representation_names) + if not representaion_names_filter: + return + + if version_ids is not None: + version_ids_filter = set(version_ids) + if not version_ids_filter: + return + + if version_ids_filter: + filters["versionIds"] = list(version_ids_filter) + + if representaion_names_filter: + filters["representationNames"] = 
list(representaion_names_filter) + + query = representations_graphql_query(fields) + + for attr, filter_value in filters.items(): + query.set_variable_value(attr, filter_value) + + for parsed_data in query.continuous_query(self): + for repre in parsed_data["project"]["representations"]: + if active is not None and active is not repre["active"]: + continue + + if use_rest: + repre = self.get_rest_representation( + project_name, repre["id"] + ) + + if "context" in repre: + orig_context = repre["context"] + context = {} + if orig_context and orig_context != "null": + context = json.loads(orig_context) + repre["context"] = context + + if own_attributes: + fill_own_attribs(repre) + yield repre + + def get_representation_by_id( + self, + project_name, + representation_id, + fields=None, + own_attributes=False + ): + """Query representation entity from server based on id filter. + + Args: + project_name (str): Project where to look for representation. + representation_id (str): Id of representation. + fields (Union[Iterable[str], None]): fields to be queried + for representations. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict[str, Any], None]: Queried representation entity or None. + """ + + representations = self.get_representations( + project_name, + representation_ids=[representation_id], + active=None, + fields=fields, + own_attributes=own_attributes + ) + for representation in representations: + return representation + return None + + def get_representation_by_name( + self, + project_name, + representation_name, + version_id, + fields=None, + own_attributes=False + ): + """Query representation entity by name and version id. + + Args: + project_name (str): Project where to look for representation. + representation_name (str): Representation name. + version_id (str): Version id. + fields (Union[Iterable[str], None]): fields to be queried + for representations. 
+ own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict[str, Any], None]: Queried representation entity or None. + """ + + representations = self.get_representations( + project_name, + representation_names=[representation_name], + version_ids=[version_id], + active=None, + fields=fields, + own_attributes=own_attributes + ) + for representation in representations: + return representation + return None + + def get_representations_parents(self, project_name, representation_ids): + """Find representations parents by representation id. + + Representation parent entities up to project. + + Args: + project_name (str): Project where to look for entities. + representation_ids (Iterable[str]): Representation ids. + + Returns: + dict[str, RepresentationParents]: Parent entities by + representation id. + """ + + if not representation_ids: + return {} + + project = self.get_project(project_name) + repre_ids = set(representation_ids) + output = { + repre_id: RepresentationParents(None, None, None, None) + for repre_id in representation_ids + } + + version_fields = self.get_default_fields_for_type("version") + subset_fields = self.get_default_fields_for_type("subset") + folder_fields = self.get_default_fields_for_type("folder") + + query = representations_parents_qraphql_query( + version_fields, subset_fields, folder_fields + ) + query.set_variable_value("projectName", project_name) + query.set_variable_value("representationIds", list(repre_ids)) + + parsed_data = query.query(self) + for repre in parsed_data["project"]["representations"]: + repre_id = repre["id"] + version = repre.pop("version") + subset = version.pop("subset") + folder = subset.pop("folder") + output[repre_id] = RepresentationParents( + version, subset, folder, project + ) + + return output + + def get_representation_parents(self, project_name, representation_id): + """Find representation parents by representation id. 
+ + Representation parent entities up to project. + + Args: + project_name (str): Project where to look for entities. + representation_id (str): Representation id. + + Returns: + RepresentationParents: Representation parent entities. + """ + + if not representation_id: + return None + + parents_by_repre_id = self.get_representations_parents( + project_name, [representation_id] + ) + return parents_by_repre_id[representation_id] + + def get_repre_ids_by_context_filters( + self, + project_name, + context_filters, + representation_names=None, + version_ids=None + ): + """Find representation ids which match passed context filters. + + Each representation has context integrated on representation entity in + database. The context may contain project, folder, task name or subset, + family and many more. This implementation gives option to quickly + filter representation based on representation data in database. + + Context filters have defined structure. To define filter of nested + subfield use dot '.' as delimiter (For example 'task.name'). + Filter values can be regex filters. String or 're.Pattern' can be used. + + Args: + project_name (str): Project where to look for representations. + context_filters (dict[str, list[str]]): Filters of context fields. + representation_names (Iterable[str]): Representation names, can be + used as additional filter for representations by their names. + version_ids (Iterable[str]): Version ids, can be used as additional + filter for representations by their parent ids. + + Returns: + list[str]: Representation ids that match passed filters. + + Example: + The function returns just representation ids so if entities are + required for funtionality they must be queried afterwards by + their ids. + >>> project_name = "testProject" + >>> filters = { + ... "task.name": ["[aA]nimation"], + ... "subset": [".*[Mm]ain"] + ... } + >>> repre_ids = get_repre_ids_by_context_filters( + ... 
project_name, filters) + >>> repres = get_representations(project_name, repre_ids) + """ + + if not isinstance(context_filters, dict): + raise TypeError( + "Expected 'dict' got {}".format(str(type(context_filters))) + ) + + filter_body = {} + if representation_names is not None: + if not representation_names: + return [] + filter_body["names"] = list(set(representation_names)) + + if version_ids is not None: + if not version_ids: + return [] + filter_body["versionIds"] = list(set(version_ids)) + + body_context_filters = [] + for key, filters in context_filters.items(): + if not isinstance(filters, (set, list, tuple)): + raise TypeError( + "Expected 'set', 'list', 'tuple' got {}".format( + str(type(filters)))) + + + new_filters = set() + for filter_value in filters: + if isinstance(filter_value, PatternType): + filter_value = filter_value.pattern + new_filters.add(filter_value) + + body_context_filters.append({ + "key": key, + "values": list(new_filters) + }) + + response = self.post( + "projects/{}/repreContextFilter".format(project_name), + context=body_context_filters, + **filter_body + ) + response.raise_for_status() + return response.data["ids"] + + def get_workfiles_info( + self, + project_name, + workfile_ids=None, + task_ids=None, + paths=None, + fields=None, + own_attributes=False + ): + """Workfile info entities by passed filters. + + Args: + project_name (str): Project under which the entity is located. + workfile_ids (Optional[Iterable[str]]): Workfile ids. + task_ids (Optional[Iterable[str]]): Task ids. + paths (Optional[Iterable[str]]): Rootless workfiles paths. + fields (Union[Iterable[str], None]): Fields to be queried for + representation. All possible fields are returned if 'None' is + passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Generator[dict[str, Any]]: Queried workfile info entites. 
+ """ + + filters = {"projectName": project_name} + if task_ids is not None: + task_ids = set(task_ids) + if not task_ids: + return + filters["taskIds"] = list(task_ids) + + if paths is not None: + paths = set(paths) + if not paths: + return + filters["paths"] = list(paths) + + if workfile_ids is not None: + workfile_ids = set(workfile_ids) + if not workfile_ids: + return + filters["workfileIds"] = list(workfile_ids) + + if not fields: + fields = DEFAULT_WORKFILE_INFO_FIELDS + fields = set(fields) + if own_attributes: + fields.add("ownAttrib") + + query = workfiles_info_graphql_query(fields) + + for attr, filter_value in filters.items(): + query.set_variable_value(attr, filter_value) + + for parsed_data in query.continuous_query(self): + for workfile_info in parsed_data["project"]["workfiles"]: + if own_attributes: + fill_own_attribs(workfile_info) + yield workfile_info + + def get_workfile_info( + self, project_name, task_id, path, fields=None, own_attributes=False + ): + """Workfile info entity by task id and workfile path. + + Args: + project_name (str): Project under which the entity is located. + task_id (str): Task id. + path (str): Rootless workfile path. + fields (Union[Iterable[str], None]): Fields to be queried for + representation. All possible fields are returned if 'None' is + passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict[str, Any], None]: Workfile info entity or None. + """ + + if not task_id or not path: + return None + + for workfile_info in self.get_workfiles_info( + project_name, + task_ids=[task_id], + paths=[path], + fields=fields, + own_attributes=own_attributes + ): + return workfile_info + return None + + def get_workfile_info_by_id( + self, project_name, workfile_id, fields=None, own_attributes=False + ): + """Workfile info entity by id. + + Args: + project_name (str): Project under which the entity is located. + workfile_id (str): Workfile info id. 
+ fields (Union[Iterable[str], None]): Fields to be queried for + representation. All possible fields are returned if 'None' is + passed. + own_attributes (bool): Attribute values that are not explicitly set + on entity will have 'None' value. + + Returns: + Union[dict[str, Any], None]: Workfile info entity or None. + """ + + if not workfile_id: + return None + + for workfile_info in self.get_workfiles_info( + project_name, + workfile_ids=[workfile_id], + fields=fields, + own_attributes=own_attributes + ): + return workfile_info + return None + + def get_thumbnail( + self, project_name, entity_type, entity_id, thumbnail_id=None + ): + """Get thumbnail from server. + + Permissions of thumbnails are related to entities so thumbnails must be + queried per entity. So an entity type and entity type is required to + be passed. + + If thumbnail id is passed logic can look into locally cached thumbnails + before calling server which can enhance loading time. If thumbnail id + is not passed the thumbnail is always downloaded even if is available. + + Notes: + It is recommended to use one of prepared entity type specific + methods 'get_folder_thumbnail', 'get_version_thumbnail' or + 'get_workfile_thumbnail'. + We do recommend pass thumbnail id if you have access to it. Each + entity that allows thumbnails has 'thumbnailId' field so it can + be queried. + + Args: + project_name (str): Project under which the entity is located. + entity_type (str): Entity type which passed entity id represents. + entity_id (str): Entity id for which thumbnail should be returned. + thumbnail_id (str): Prepared thumbnail id from entity. Used only + to check if thumbnail was already cached. + + Returns: + Union[str, None]: Path to downlaoded thumbnail or none if entity + does not have any (or if user does not have permissions). 
+ """ + + # Look for thumbnail into cache and return the path if was found + filepath = self._thumbnail_cache.get_thumbnail_filepath( + project_name, thumbnail_id + ) + if filepath: + return filepath + + if entity_type in ( + "folder", + "version", + "workfile", + ): + entity_type += "s" + + # Receive thumbnail content from server + result = self.raw_get("projects/{}/{}/{}/thumbnail".format( + project_name, + entity_type, + entity_id + )) + + if result.content_type is None: + return None + + # It is expected the response contains thumbnail id otherwise the + # content cannot be cached and filepath returned + thumbnail_id = result.headers.get("X-Thumbnail-Id") + if thumbnail_id is None: + return None + + # Cache thumbnail and return it's path + return self._thumbnail_cache.store_thumbnail( + project_name, + thumbnail_id, + result.content, + result.content_type + ) + + def get_folder_thumbnail( + self, project_name, folder_id, thumbnail_id=None + ): + """Prepared method to receive thumbnail for folder entity. + + Args: + project_name (str): Project under which the entity is located. + folder_id (str): Folder id for which thumbnail should be returned. + thumbnail_id (str): Prepared thumbnail id from entity. Used only + to check if thumbnail was already cached. + + Returns: + Union[str, None]: Path to downlaoded thumbnail or none if entity + does not have any (or if user does not have permissions). + """ + + return self.get_thumbnail( + project_name, "folder", folder_id, thumbnail_id + ) + + def get_version_thumbnail( + self, project_name, version_id, thumbnail_id=None + ): + """Prepared method to receive thumbnail for version entity. + + Args: + project_name (str): Project under which the entity is located. + version_id (str): Version id for which thumbnail should be + returned. + thumbnail_id (str): Prepared thumbnail id from entity. Used only + to check if thumbnail was already cached. 
+ + Returns: + Union[str, None]: Path to downlaoded thumbnail or none if entity + does not have any (or if user does not have permissions). + """ + + return self.get_thumbnail( + project_name, "version", version_id, thumbnail_id + ) + + def get_workfile_thumbnail( + self, project_name, workfile_id, thumbnail_id=None + ): + """Prepared method to receive thumbnail for workfile entity. + + Args: + project_name (str): Project under which the entity is located. + workfile_id (str): Worfile id for which thumbnail should be + returned. + thumbnail_id (str): Prepared thumbnail id from entity. Used only + to check if thumbnail was already cached. + + Returns: + Union[str, None]: Path to downlaoded thumbnail or none if entity + does not have any (or if user does not have permissions). + """ + + return self.get_thumbnail( + project_name, "workfile", workfile_id, thumbnail_id + ) + + def create_project( + self, + project_name, + project_code, + library_project=False, + preset_name=None + ): + """Create project using Ayon settings. + + This project creation function is not validating project entity on + creation. It is because project entity is created blindly with only + minimum required information about project which is it's name, code. + + Entered project name must be unique and project must not exist yet. + + Note: + This function is here to be OP v4 ready but in v3 has more logic + to do. That's why inner imports are in the body. + + Args: + project_name (str): New project name. Should be unique. + project_code (str): Project's code should be unique too. + library_project (bool): Project is library project. + preset_name (str): Name of anatomy preset. Default is used if not + passed. + con (ServerAPI): Connection to server with logged user. + + Raises: + ValueError: When project name already exists. + + Returns: + Dict[str, Any]: Created project entity. 
+ """ + + if self.get_project(project_name): + raise ValueError("Project with name \"{}\" already exists".format( + project_name + )) + + if not PROJECT_NAME_REGEX.match(project_name): + raise ValueError(( + "Project name \"{}\" contain invalid characters" + ).format(project_name)) + + preset = self.get_project_anatomy_preset(preset_name) + + result = self.post( + "projects", + name=project_name, + code=project_code, + anatomy=preset, + library=library_project + ) + + if result.status != 201: + details = "Unknown details ({})".format(result.status) + if result.data: + details = result.data.get("detail") or details + raise ValueError("Failed to create project \"{}\": {}".format( + project_name, details + )) + + return self.get_project(project_name) + + def delete_project(self, project_name): + """Delete project from server. + + This will completely remove project from server without any step back. + + Args: + project_name (str): Project name that will be removed. + """ + + if not self.get_project(project_name): + raise ValueError("Project with name \"{}\" was not found".format( + project_name + )) + + result = self.delete("projects/{}".format(project_name)) + if result.status_code != 204: + raise ValueError( + "Failed to delete project \"{}\". {}".format( + project_name, result.data["detail"] + ) + ) + + def create_thumbnail(self, project_name, src_filepath): + """Create new thumbnail on server from passed path. + + Args: + project_name (str): Project where the thumbnail will be created + and can be used. + src_filepath (str): Filepath to thumbnail which should be uploaded. + + Returns: + str: Created thumbnail id. + + Todos: + Define more specific exceptions for thumbnail creation. + + Raises: + ValueError: When thumbnail creation fails (due to many reasons). 
+ """ + + if not os.path.exists(src_filepath): + raise ValueError("Entered filepath does not exist.") + + ext = os.path.splitext(src_filepath)[-1].lower() + if ext == ".png": + mime_type = "image/png" + + elif ext in (".jpeg", ".jpg"): + mime_type = "image/jpeg" + + else: + raise ValueError( + "Thumbnail source file has unknown extensions {}".format(ext)) + + with open(src_filepath, "rb") as stream: + content = stream.read() + + response = self.raw_post( + "projects/{}/thumbnails".format(project_name), + headers={"Content-Type": mime_type}, + data=content + ) + if response.status_code != 200: + _detail = response.data.get("detail") + details = "" + if _detail: + details = " {}".format(_detail) + raise ValueError( + "Failed to create thumbnail.{}".format(details)) + return response.data["id"] + + def send_batch_operations( + self, + project_name, + operations, + can_fail=False, + raise_on_fail=True + ): + """Post multiple CRUD operations to server. + + When multiple changes should be made on server side this is the best + way to go. It is possible to pass multiple operations to process on a + server side and do the changes in a transaction. + + Args: + project_name (str): On which project should be operations + processed. + operations (list[dict[str, Any]]): Operations to be processed. + can_fail (bool): Server will try to process all operations even if + one of them fails. + raise_on_fail (bool): Raise exception if an operation fails. + You can handle failed operations on your own when set to + 'False'. + + Raises: + ValueError: Operations can't be converted to json string. + FailedOperations: When output does not contain server operations + or 'raise_on_fail' is enabled and any operation fails. + + Returns: + list[dict[str, Any]]: Operations result with process details. 
+ """ + + if not operations: + return [] + + body_by_id = {} + operations_body = [] + for operation in operations: + if not operation: + continue + + op_id = operation.get("id") + if not op_id: + op_id = create_entity_id() + operation["id"] = op_id + + try: + body = json.loads( + json.dumps(operation, default=entity_data_json_default) + ) + except: + raise ValueError("Couldn't json parse body: {}".format( + json.dumps( + operation, indent=4, default=failed_json_default + ) + )) + + body_by_id[op_id] = body + operations_body.append(body) + + if not operations_body: + return [] + + result = self.post( + "projects/{}/operations".format(project_name), + operations=operations_body, + canFail=can_fail + ) + + op_results = result.get("operations") + if op_results is None: + raise FailedOperations( + "Operation failed. Content: {}".format(str(result)) + ) + + if result.get("success") or not raise_on_fail: + return op_results + + for op_result in op_results: + if not op_result["success"]: + operation_id = op_result["id"] + raise FailedOperations(( + "Operation \"{}\" failed with data:\n{}\nDetail: {}." + ).format( + operation_id, + json.dumps(body_by_id[operation_id], indent=4), + op_result["detail"], + )) + return op_results diff --git a/openpype/vendor/python/ayon/ayon_api/thumbnails.py b/openpype/vendor/python/ayon/ayon_api/thumbnails.py new file mode 100644 index 0000000000..50acd94dcb --- /dev/null +++ b/openpype/vendor/python/ayon/ayon_api/thumbnails.py @@ -0,0 +1,219 @@ +import os +import time +import collections + +import appdirs + +FileInfo = collections.namedtuple( + "FileInfo", + ("path", "size", "modification_time") +) + + +class ThumbnailCache: + """Cache of thumbnails on local storage. + + Thumbnails are cached to appdirs to predefined directory. Each project has + own subfolder with thumbnails -> that's because each project has own + thumbnail id validation and file names are thumbnail ids with matching + extension. Extensions are predefined (.png and .jpeg). 
+
+ Cache has cleanup mechanism which is triggered on initialization by default.
+
+ The cleanup has 2 levels:
+ 1. soft cleanup which removes all files that are older than 'days_alive'
+ 2. max size cleanup which removes all files until the thumbnails folder
+ contains less than 'max_filesize'
+ - this is time consuming so it's not triggered automatically
+
+ Args:
+ cleanup (bool): Trigger soft cleanup (Cleanup expired thumbnails).
+ """
+
+ # Lifetime of thumbnails (in seconds)
+ # - default 3 days
+ days_alive = 3 * 24 * 60 * 60
+ # Max size of thumbnail directory (in bytes)
+ # - default 2 Gb
+ max_filesize = 2 * 1024 * 1024 * 1024
+
+ def __init__(self, cleanup=True):
+ self._thumbnails_dir = None
+ if cleanup:
+ self.cleanup()
+
+ def get_thumbnails_dir(self):
+ """Root directory where thumbnails are stored.
+
+ Returns:
+ str: Path to thumbnails root.
+ """
+
+ if self._thumbnails_dir is None:
+ directory = appdirs.user_data_dir("ayon", "ynput")
+ self._thumbnails_dir = os.path.join(directory, "thumbnails")
+ return self._thumbnails_dir
+
+ thumbnails_dir = property(get_thumbnails_dir)
+
+ def get_thumbnails_dir_file_info(self):
+ """Get information about all files in thumbnails directory.
+
+ Returns:
+ List[FileInfo]: List of file information about all files.
+ """
+
+ thumbnails_dir = self.thumbnails_dir
+ files_info = []
+ if not os.path.exists(thumbnails_dir):
+ return files_info
+
+ for root, _, filenames in os.walk(thumbnails_dir):
+ for filename in filenames:
+ path = os.path.join(root, filename)
+ files_info.append(FileInfo(
+ path, os.path.getsize(path), os.path.getmtime(path)
+ ))
+ return files_info
+
+ def get_thumbnails_dir_size(self, files_info=None):
+ """Get full size of thumbnail directory.
+
+ Args:
+ files_info (List[FileInfo]): Prepared file information about
+ files in thumbnail directory.
+
+ Returns:
+ int: File size of all files in thumbnail directory.
+ """ + + if files_info is None: + files_info = self.get_thumbnails_dir_file_info() + + if not files_info: + return 0 + + return sum( + file_info.size + for file_info in files_info + ) + + def cleanup(self, check_max_size=False): + """Cleanup thumbnails directory. + + Args: + check_max_size (bool): Also cleanup files to match max size of + thumbnails directory. + """ + + thumbnails_dir = self.get_thumbnails_dir() + # Skip if thumbnails dir does not exists yet + if not os.path.exists(thumbnails_dir): + return + + self._soft_cleanup(thumbnails_dir) + if check_max_size: + self._max_size_cleanup(thumbnails_dir) + + def _soft_cleanup(self, thumbnails_dir): + current_time = time.time() + for root, _, filenames in os.walk(thumbnails_dir): + for filename in filenames: + path = os.path.join(root, filename) + modification_time = os.path.getmtime(path) + if current_time - modification_time > self.days_alive: + os.remove(path) + + def _max_size_cleanup(self, thumbnails_dir): + files_info = self.get_thumbnails_dir_file_info() + size = self.get_thumbnails_dir_size(files_info) + if size < self.max_filesize: + return + + sorted_file_info = collections.deque( + sorted(files_info, key=lambda item: item.modification_time) + ) + diff = size - self.max_filesize + while diff > 0: + if not sorted_file_info: + break + + file_info = sorted_file_info.popleft() + diff -= file_info.size + os.remove(file_info.path) + + def get_thumbnail_filepath(self, project_name, thumbnail_id): + """Get thumbnail by thumbnail id. + + Args: + project_name (str): Name of project. + thumbnail_id (str): Thumbnail id. + + Returns: + Union[str, None]: Path to thumbnail image or None if thumbnail + is not cached yet. 
+ """
+
+ if not thumbnail_id:
+ return None
+
+ for ext in (
+ ".png",
+ ".jpeg",
+ ):
+ filepath = os.path.join(
+ self.thumbnails_dir, project_name, thumbnail_id + ext
+ )
+ if os.path.exists(filepath):
+ return filepath
+ return None
+
+ def get_project_dir(self, project_name):
+ """Path to root directory for specific project.
+
+ Args:
+ project_name (str): Name of project for which root directory path
+ should be returned.
+
+ Returns:
+ str: Path to root of project's thumbnails.
+ """
+
+ return os.path.join(self.thumbnails_dir, project_name)
+
+ def make_sure_project_dir_exists(self, project_name):
+ project_dir = self.get_project_dir(project_name)
+ if not os.path.exists(project_dir):
+ os.makedirs(project_dir)
+ return project_dir
+
+ def store_thumbnail(self, project_name, thumbnail_id, content, mime_type):
+ """Store thumbnail to cache folder.
+
+ Args:
+ project_name (str): Project where the thumbnail belongs to.
+ thumbnail_id (str): Id of thumbnail.
+ content (bytes): Byte content of thumbnail file.
+ mime_type (str): Mime type of content ("image/png" or "image/jpeg").
+
+ Returns:
+ str: Path to cached thumbnail image file.
+ """ + + if mime_type == "image/png": + ext = ".png" + elif mime_type == "image/jpeg": + ext = ".jpeg" + else: + raise ValueError( + "Unknown mime type for thumbnail \"{}\"".format(mime_type)) + + project_dir = self.make_sure_project_dir_exists(project_name) + thumbnail_path = os.path.join(project_dir, thumbnail_id + ext) + with open(thumbnail_path, "wb") as stream: + stream.write(content) + + current_time = time.time() + os.utime(thumbnail_path, (current_time, current_time)) + + return thumbnail_path diff --git a/openpype/vendor/python/ayon/ayon_api/utils.py b/openpype/vendor/python/ayon/ayon_api/utils.py new file mode 100644 index 0000000000..28971f7de5 --- /dev/null +++ b/openpype/vendor/python/ayon/ayon_api/utils.py @@ -0,0 +1,451 @@ +import re +import datetime +import uuid +import string +import collections +try: + # Python 3 + from urllib.parse import urlparse, urlencode +except ImportError: + # Python 2 + from urlparse import urlparse + from urllib import urlencode + +import requests +import unidecode + +from .exceptions import UrlError + +REMOVED_VALUE = object() +SLUGIFY_WHITELIST = string.ascii_letters + string.digits +SLUGIFY_SEP_WHITELIST = " ,./\\;:!|*^#@~+-_=" + +RepresentationParents = collections.namedtuple( + "RepresentationParents", + ("version", "subset", "folder", "project") +) + + +def prepare_query_string(key_values): + """Prepare data to query string. + + If there are any values a query starting with '?' is returned otherwise + an empty string. + + Args: + dict[str, Any]: Query values. + + Returns: + str: Query string. 
+ """ + + if not key_values: + return "" + return "?{}".format(urlencode(key_values)) + + +def create_entity_id(): + return uuid.uuid1().hex + + +def convert_entity_id(entity_id): + if not entity_id: + return None + + if isinstance(entity_id, uuid.UUID): + return entity_id.hex + + try: + return uuid.UUID(entity_id).hex + + except (TypeError, ValueError, AttributeError): + pass + return None + + +def convert_or_create_entity_id(entity_id=None): + output = convert_entity_id(entity_id) + if output is None: + output = create_entity_id() + return output + + +def entity_data_json_default(value): + if isinstance(value, datetime.datetime): + return int(value.timestamp()) + + raise TypeError( + "Object of type {} is not JSON serializable".format(str(type(value))) + ) + + +def slugify_string( + input_string, + separator="_", + slug_whitelist=SLUGIFY_WHITELIST, + split_chars=SLUGIFY_SEP_WHITELIST, + min_length=1, + lower=False, + make_set=False, +): + """Slugify a text string. + + This function removes transliterates input string to ASCII, removes + special characters and use join resulting elements using + specified separator. + + Args: + input_string (str): Input string to slugify + separator (str): A string used to separate returned elements + (default: "_") + slug_whitelist (str): Characters allowed in the output + (default: ascii letters, digits and the separator) + split_chars (str): Set of characters used for word splitting + (there is a sane default) + lower (bool): Convert to lower-case (default: False) + make_set (bool): Return "set" object instead of string. + min_length (int): Minimal length of an element (word). + + Returns: + Union[str, Set[str]]: Based on 'make_set' value returns slugified + string. 
+ """ + + tmp_string = unidecode.unidecode(input_string) + if lower: + tmp_string = tmp_string.lower() + + parts = [ + # Remove all characters that are not in whitelist + re.sub("[^{}]".format(re.escape(slug_whitelist)), "", part) + # Split text into part by split characters + for part in re.split("[{}]".format(re.escape(split_chars)), tmp_string) + ] + # Filter text parts by length + filtered_parts = [ + part + for part in parts + if len(part) >= min_length + ] + if make_set: + return set(filtered_parts) + return separator.join(filtered_parts) + + +def failed_json_default(value): + return "< Failed value {} > {}".format(type(value), str(value)) + + +def prepare_attribute_changes(old_entity, new_entity, replace=False): + attrib_changes = {} + new_attrib = new_entity.get("attrib") + old_attrib = old_entity.get("attrib") + if new_attrib is None: + if not replace: + return attrib_changes + new_attrib = {} + + if old_attrib is None: + return new_attrib + + for attr, new_attr_value in new_attrib.items(): + old_attr_value = old_attrib.get(attr) + if old_attr_value != new_attr_value: + attrib_changes[attr] = new_attr_value + + if replace: + for attr in old_attrib: + if attr not in new_attrib: + attrib_changes[attr] = REMOVED_VALUE + + return attrib_changes + + +def prepare_entity_changes(old_entity, new_entity, replace=False): + """Prepare changes of entities.""" + + changes = {} + for key, new_value in new_entity.items(): + if key == "attrib": + continue + + old_value = old_entity.get(key) + if old_value != new_value: + changes[key] = new_value + + if replace: + for key in old_entity: + if key not in new_entity: + changes[key] = REMOVED_VALUE + + attr_changes = prepare_attribute_changes(old_entity, new_entity, replace) + if attr_changes: + changes["attrib"] = attr_changes + return changes + + +def _try_parse_url(url): + try: + return urlparse(url) + except BaseException: + return None + + +def _try_connect_to_server(url): + try: + # TODO add validation if the url lead to 
Ayon server + # - thiw won't validate if the url lead to 'google.com' + requests.get(url) + + except BaseException: + return False + return True + + +def login_to_server(url, username, password): + """Use login to the server to receive token. + + Args: + url (str): Server url. + username (str): User's username. + password (str): User's password. + + Returns: + Union[str, None]: User's token if login was successfull. + Otherwise 'None'. + """ + + headers = {"Content-Type": "application/json"} + response = requests.post( + "{}/api/auth/login".format(url), + headers=headers, + json={ + "name": username, + "password": password + } + ) + token = None + # 200 - success + # 401 - invalid credentials + # * - other issues + if response.status_code == 200: + token = response.json()["token"] + return token + + +def logout_from_server(url, token): + """Logout from server and throw token away. + + Args: + url (str): Url from which should be logged out. + token (str): Token which should be used to log out. + """ + + headers = { + "Content-Type": "application/json", + "Authorization": "Bearer {}".format(token) + } + requests.post( + url + "/api/auth/logout", + headers=headers + ) + + +def is_token_valid(url, token): + """Check if token is valid. + + Args: + url (str): Server url. + token (str): User's token. + + Returns: + bool: True if token is valid. + """ + + headers = { + "Content-Type": "application/json", + "Authorization": "Bearer {}".format(token) + } + response = requests.get( + "{}/api/users/me".format(url), + headers=headers + ) + return response.status_code == 200 + + +def validate_url(url): + """Validate url if is valid and server is available. + + Validation checks if can be parsed as url and contains scheme. + + Function will try to autofix url thus will return modified url when + connection to server works. + + ```python + my_url = "my.server.url" + try: + # Store new url + validated_url = validate_url(my_url) + + except UrlError: + # Handle invalid url + ... 
+ ``` + + Args: + url (str): Server url. + + Returns: + Url which was used to connect to server. + + Raises: + UrlError: Error with short description and hints for user. + """ + + stripperd_url = url.strip() + if not stripperd_url: + raise UrlError( + "Invalid url format. Url is empty.", + title="Invalid url format", + hints=["url seems to be empty"] + ) + + # Not sure if this is good idea? + modified_url = stripperd_url.rstrip("/") + parsed_url = _try_parse_url(modified_url) + universal_hints = [ + "does the url work in browser?" + ] + if parsed_url is None: + raise UrlError( + "Invalid url format. Url cannot be parsed as url \"{}\".".format( + modified_url + ), + title="Invalid url format", + hints=universal_hints + ) + + # Try add 'https://' scheme if is missing + # - this will trigger UrlError if both will crash + if not parsed_url.scheme: + new_url = "https://" + modified_url + if _try_connect_to_server(new_url): + return new_url + + if _try_connect_to_server(modified_url): + return modified_url + + hints = [] + if "/" in parsed_url.path or not parsed_url.scheme: + new_path = parsed_url.path.split("/")[0] + if not parsed_url.scheme: + new_path = "https://" + new_path + + hints.append( + "did you mean \"{}\"?".format(parsed_url.scheme + new_path) + ) + + raise UrlError( + "Couldn't connect to server on \"{}\"".format(url), + title="Couldn't connect to server", + hints=hints + universal_hints + ) + + +class TransferProgress: + """Object to store progress of download/upload from/to server.""" + + def __init__(self): + self._started = False + self._transfer_done = False + self._transfered = 0 + self._content_size = None + + self._failed = False + self._fail_reason = None + + self._source_url = "N/A" + self._destination_url = "N/A" + + def get_content_size(self): + return self._content_size + + def set_content_size(self, content_size): + if self._content_size is not None: + raise ValueError("Content size was set more then once") + self._content_size = content_size 
+ + def get_started(self): + return self._started + + def set_started(self): + if self._started: + raise ValueError("Progress already started") + self._started = True + + def get_transfer_done(self): + return self._transfer_done + + def set_transfer_done(self): + if self._transfer_done: + raise ValueError("Progress was already marked as done") + if not self._started: + raise ValueError("Progress didn't start yet") + self._transfer_done = True + + def get_failed(self): + return self._failed + + def get_fail_reason(self): + return self._fail_reason + + def set_failed(self, reason): + self._fail_reason = reason + self._failed = True + + def get_transferred_size(self): + return self._transfered + + def set_transferred_size(self, transfered): + self._transfered = transfered + + def add_transferred_chunk(self, chunk_size): + self._transfered += chunk_size + + def get_source_url(self): + return self._source_url + + def set_source_url(self, url): + self._source_url = url + + def get_destination_url(self): + return self._destination_url + + def set_destination_url(self, url): + self._destination_url = url + + @property + def is_running(self): + if ( + not self.started + or self.done + or self.failed + ): + return False + return True + + @property + def transfer_progress(self): + if self._content_size is None: + return None + return (self._transfered * 100.0) / float(self._content_size) + + content_size = property(get_content_size, set_content_size) + started = property(get_started) + transfer_done = property(get_transfer_done) + failed = property(get_failed) + fail_reason = property(get_fail_reason) + source_url = property(get_source_url, set_source_url) + destination_url = property(get_destination_url, set_destination_url) + content_size = property(get_content_size, set_content_size) + transferred_size = property(get_transferred_size, set_transferred_size) diff --git a/openpype/vendor/python/ayon/ayon_api/version.py b/openpype/vendor/python/ayon/ayon_api/version.py new 
file mode 100644 index 0000000000..a65f885820 --- /dev/null +++ b/openpype/vendor/python/ayon/ayon_api/version.py @@ -0,0 +1,2 @@ +"""Package declaring Python API for Ayon server.""" +__version__ = "0.1.16" \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index f915832fb8..ee7003d565 100644 --- a/poetry.lock +++ b/poetry.lock @@ -302,24 +302,6 @@ files = [ pycodestyle = ">=2.10.0" tomli = {version = "*", markers = "python_version < \"3.11\""} -[[package]] -name = "ayon-python-api" -version = "0.1.16" -description = "AYON Python API" -category = "main" -optional = false -python-versions = "*" -files = [ - {file = "ayon-python-api-0.1.16.tar.gz", hash = "sha256:666110954dd75b2be1699a29b4732cfb0bcb09d01f64fba4449bfc8ac1fb43f1"}, - {file = "ayon_python_api-0.1.16-py3-none-any.whl", hash = "sha256:bbcd6df1f80ddf32e653a1bb31289cb5fd1a8bea36ab4c8e6aef08c41b6393de"}, -] - -[package.dependencies] -appdirs = ">=1,<2" -requests = ">=2.27.1" -six = ">=1.15" -Unidecode = ">=1.2.0" - [[package]] name = "babel" version = "2.11.0" diff --git a/pyproject.toml b/pyproject.toml index ebd7ea127d..2427c447c3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,7 +70,6 @@ requests = "^2.25.1" pysftp = "^0.2.9" dropbox = "^11.20.0" aiohttp-middlewares = "^2.0.0" -ayon-python-api = "^0.1" opencolorio = "^2.2.0" Unidecode = "^1.2"