vendorize ayon api (#4753)

This commit is contained in:
Jakub Trllo 2023-03-30 18:10:46 +02:00 committed by Jakub Trllo
parent e9b1713975
commit db6222fd0d
16 changed files with 9882 additions and 19 deletions

View file

@ -96,6 +96,19 @@ else:
sys.path.append(_dependencies_path)
_python_paths.append(_dependencies_path)
# -------------------------------------------------
# Temporary solution to add ayon_api to python path
# -------------------------------------------------
# This is to avoid need of new build & release when ayon-python-api is updated.
ayon_dependency_dir = os.path.join(
    AYON_ROOT, "openpype", "vendor", "python", "ayon"
)
if ayon_dependency_dir in _python_paths:
    _python_paths.remove(ayon_dependency_dir)
# Fix: previously '_dependencies_path' (already present in '_python_paths')
# was inserted here, so subprocesses launched with PYTHONPATH built from
# '_python_paths' could not import the vendored 'ayon_api'. Insert the ayon
# dependency dir so PYTHONPATH matches what this process puts on sys.path.
_python_paths.insert(0, ayon_dependency_dir)
sys.path.insert(0, ayon_dependency_dir)
# -------------------------------------------------
# Vendored python modules that must not be in PYTHONPATH environment but
# are required for OpenPype processes
sys.path.insert(0, os.path.join(AYON_ROOT, "vendor", "python"))

View file

@ -0,0 +1,256 @@
from .utils import (
TransferProgress,
slugify_string,
)
from .server_api import (
ServerAPI,
)
from ._api import (
GlobalServerAPI,
ServiceContext,
init_service,
get_service_name,
get_service_addon_name,
get_service_addon_version,
get_service_addon_settings,
is_connection_created,
create_connection,
close_connection,
change_token,
set_environments,
get_server_api_connection,
get_site_id,
set_site_id,
get_client_version,
set_client_version,
get_default_settings_variant,
set_default_settings_variant,
get_base_url,
get_rest_url,
raw_get,
raw_post,
raw_put,
raw_patch,
raw_delete,
get,
post,
put,
patch,
delete,
get_event,
get_events,
dispatch_event,
update_event,
enroll_event_job,
download_file,
upload_file,
query_graphql,
get_addons_info,
download_addon_private_file,
get_dependencies_info,
update_dependency_info,
download_dependency_package,
upload_dependency_package,
delete_dependency_package,
get_user,
get_users,
get_attributes_for_type,
get_default_fields_for_type,
get_project_anatomy_preset,
get_project_anatomy_presets,
get_project_roots_by_site,
get_project_roots_for_site,
get_addon_site_settings_schema,
get_addon_settings_schema,
get_addon_studio_settings,
get_addon_project_settings,
get_addon_settings,
get_addons_studio_settings,
get_addons_project_settings,
get_addons_settings,
get_projects,
get_project,
create_project,
delete_project,
get_folder_by_id,
get_folder_by_name,
get_folder_by_path,
get_folders,
get_tasks,
get_folder_ids_with_subsets,
get_subset_by_id,
get_subset_by_name,
get_subsets,
get_subset_families,
get_version_by_id,
get_version_by_name,
version_is_latest,
get_versions,
get_hero_version_by_subset_id,
get_hero_version_by_id,
get_hero_versions,
get_last_versions,
get_last_version_by_subset_id,
get_last_version_by_subset_name,
get_representation_by_id,
get_representation_by_name,
get_representations,
get_representations_parents,
get_representation_parents,
get_repre_ids_by_context_filters,
create_thumbnail,
get_thumbnail,
get_folder_thumbnail,
get_version_thumbnail,
get_workfile_thumbnail,
)
# Public names of the 'ayon_api' package. Keep in sync with the imports
# above: every name imported from '.utils', '.server_api' and '._api' is
# re-exported here.
__all__ = (
    "TransferProgress",
    "slugify_string",

    "ServerAPI",

    "GlobalServerAPI",
    "ServiceContext",
    "init_service",
    "get_service_name",
    "get_service_addon_name",
    "get_service_addon_version",
    "get_service_addon_settings",
    "is_connection_created",
    "create_connection",
    "close_connection",
    "change_token",
    "set_environments",
    "get_server_api_connection",
    "get_site_id",
    "set_site_id",
    "get_client_version",
    "set_client_version",
    "get_default_settings_variant",
    "set_default_settings_variant",
    "get_base_url",
    "get_rest_url",
    "raw_get",
    "raw_post",
    "raw_put",
    "raw_patch",
    "raw_delete",
    "get",
    "post",
    "put",
    "patch",
    "delete",
    "get_event",
    "get_events",
    "dispatch_event",
    "update_event",
    "enroll_event_job",
    "download_file",
    "upload_file",
    "query_graphql",
    "get_addons_info",
    "download_addon_private_file",
    "get_dependencies_info",
    "update_dependency_info",
    "download_dependency_package",
    "upload_dependency_package",
    "delete_dependency_package",
    "get_user",
    "get_users",
    "get_attributes_for_type",
    "get_default_fields_for_type",
    "get_project_anatomy_preset",
    "get_project_anatomy_presets",
    "get_project_roots_by_site",
    "get_project_roots_for_site",
    "get_addon_site_settings_schema",
    "get_addon_settings_schema",
    "get_addon_studio_settings",
    "get_addon_project_settings",
    "get_addon_settings",
    "get_addons_studio_settings",
    "get_addons_project_settings",
    "get_addons_settings",
    "get_projects",
    "get_project",
    "create_project",
    "delete_project",
    "get_folder_by_id",
    "get_folder_by_name",
    "get_folder_by_path",
    "get_folders",
    "get_tasks",
    "get_folder_ids_with_subsets",
    "get_subset_by_id",
    "get_subset_by_name",
    "get_subsets",
    "get_subset_families",
    "get_version_by_id",
    "get_version_by_name",
    "version_is_latest",
    "get_versions",
    "get_hero_version_by_subset_id",
    "get_hero_version_by_id",
    "get_hero_versions",
    "get_last_versions",
    "get_last_version_by_subset_id",
    "get_last_version_by_subset_name",
    "get_representation_by_id",
    "get_representation_by_name",
    "get_representations",
    "get_representations_parents",
    "get_representation_parents",
    "get_repre_ids_by_context_filters",
    "create_thumbnail",
    "get_thumbnail",
    "get_folder_thumbnail",
    "get_version_thumbnail",
    "get_workfile_thumbnail",
)

View file

@ -0,0 +1,811 @@
"""Singleton based server api for direct access.
This implementation will be probably the most used part of package. Gives
option to have singleton connection to Server URL based on environment variable
values. All public functions and classes are imported in '__init__.py' so
they're available directly in top module import.
"""
import os
import socket
from .constants import (
SERVER_URL_ENV_KEY,
SERVER_TOKEN_ENV_KEY,
)
from .server_api import ServerAPI
from .exceptions import FailedServiceInit
class GlobalServerAPI(ServerAPI):
    """Extended server api which also handles storing tokens and url.

    Created object expects to have set environment variable
    'AYON_SERVER_URL'. Also is expecting filled 'AYON_TOKEN'
    but that can be filled afterwards with calling 'login' method.
    """

    def __init__(self, site_id=None, client_version=None):
        url = self.get_url()
        token = self.get_token()

        super(GlobalServerAPI, self).__init__(url, token, site_id, client_version)

        # Fail fast when the server cannot be reached and prepare a session
        # for all follow-up requests
        self.validate_server_availability()
        self.create_session()

    def login(self, username, password):
        """Login to the server or change user.

        If user is the same as current user and token is available the
        login is skipped.
        """

        previous_token = self._access_token
        super(GlobalServerAPI, self).login(username, password)
        # Propagate the new token to environment so subprocesses inherit it
        if self.has_valid_token and previous_token != self._access_token:
            os.environ[SERVER_TOKEN_ENV_KEY] = self._access_token

    @staticmethod
    def get_url():
        # Server url from environment (may be 'None' when not set)
        return os.environ.get(SERVER_URL_ENV_KEY)

    @staticmethod
    def get_token():
        # User token from environment (may be 'None' when not set)
        return os.environ.get(SERVER_TOKEN_ENV_KEY)

    @staticmethod
    def set_environments(url, token):
        """Change url and token environments in currently running process.

        Args:
            url (str): New server url.
            token (str): User's token.
        """

        os.environ[SERVER_URL_ENV_KEY] = url or ""
        os.environ[SERVER_TOKEN_ENV_KEY] = token or ""
class GlobalContext:
    """Holder of the process wide server connection.

    The connection is created lazily on first access so importing the
    package never triggers network communication.
    """

    _connection = None

    @classmethod
    def is_connection_created(cls):
        """bool: A global connection object currently exists."""

        return cls._connection is not None

    @classmethod
    def change_token(cls, url, token):
        """Store new url/token and apply them to the live connection.

        The token is swapped in-place when the url matches the current
        connection, otherwise the connection is closed.
        """

        GlobalServerAPI.set_environments(url, token)
        if cls._connection is None:
            return

        if cls._connection.get_base_url() != url:
            cls.close_connection()
        else:
            cls._connection.set_token(token)

    @classmethod
    def close_connection(cls):
        """Close and drop the global connection (no-op when missing)."""

        if cls._connection is None:
            return
        cls._connection.close_session()
        cls._connection = None

    @classmethod
    def create_connection(cls, *args, **kwargs):
        """Replace the global connection with a freshly created one.

        Returns:
            GlobalServerAPI: Newly created connection.
        """

        if cls._connection is not None:
            cls.close_connection()
        cls._connection = GlobalServerAPI(*args, **kwargs)
        return cls._connection

    @classmethod
    def get_server_api_connection(cls):
        """Return the global connection, creating it when missing.

        Returns:
            GlobalServerAPI: The singleton connection.
        """

        if cls._connection is None:
            cls.create_connection()
        return cls._connection
class ServiceContext:
    """Helper for services running under server.

    When service is running from server the process receives information about
    connection from environment variables. This class helps to initialize the
    values without knowing environment variables (that may change over time).

    All what must be done is to call 'init_service' function/method. The
    arguments are for cases when the service is running in specific environment
    and their values are e.g. loaded from private file or for testing purposes.
    """

    # Filled by 'init_service'; all stay 'None' until initialization
    token = None
    server_url = None
    addon_name = None
    addon_version = None
    service_name = None

    @staticmethod
    def get_value_from_envs(env_keys, value=None):
        """Return explicitly passed value or first non-empty env variable.

        Args:
            env_keys (Iterable[str]): Environment variable names to check,
                in priority order.
            value (Optional[str]): Explicit value which wins over envs.

        Returns:
            Union[str, None]: Resolved value or None.
        """

        if value:
            return value

        for env_key in env_keys:
            value = os.environ.get(env_key)
            if value:
                break
        return value

    @classmethod
    def init_service(
        cls,
        token=None,
        server_url=None,
        addon_name=None,
        addon_version=None,
        service_name=None,
        connect=True
    ):
        """Initialize service context from arguments and environment.

        Args:
            token (Optional[str]): Authentication token.
            server_url (Optional[str]): Url to server.
            addon_name (Optional[str]): Name of addon running the service.
            addon_version (Optional[str]): Version of the addon.
            service_name (Optional[str]): Name of service; hostname is used
                as fallback.
            connect (bool): Also create the connection and verify login.

        Raises:
            FailedServiceInit: When server url or token cannot be resolved.
        """

        # Both 'AY_*' and 'AYON_*' variants are checked for compatibility
        token = cls.get_value_from_envs(
            ("AY_API_KEY", "AYON_TOKEN"),
            token
        )
        server_url = cls.get_value_from_envs(
            ("AY_SERVER_URL", "AYON_SERVER_URL"),
            server_url
        )
        if not server_url:
            raise FailedServiceInit("URL to server is not set")

        if not token:
            raise FailedServiceInit(
                "Token to server {} is not set".format(server_url)
            )

        addon_name = cls.get_value_from_envs(
            ("AY_ADDON_NAME", "AYON_ADDON_NAME"),
            addon_name
        )
        addon_version = cls.get_value_from_envs(
            ("AY_ADDON_VERSION", "AYON_ADDON_VERSION"),
            addon_version
        )
        service_name = cls.get_value_from_envs(
            ("AY_SERVICE_NAME", "AYON_SERVICE_NAME"),
            service_name
        )

        cls.token = token
        cls.server_url = server_url
        cls.addon_name = addon_name
        cls.addon_version = addon_version
        # Hostname is used as service name when nothing else is available
        cls.service_name = service_name or socket.gethostname()

        # Make sure required environments for GlobalServerAPI are set
        GlobalServerAPI.set_environments(cls.server_url, cls.token)

        if connect:
            print("Connecting to server \"{}\"".format(server_url))
            con = GlobalContext.get_server_api_connection()
            user = con.get_user()
            print("Logged in as user \"{}\"".format(user["name"]))
def init_service(*args, **kwargs):
    """Initialize current connection from service.

    The service expect specific environment variables. The variables must all
    be set to make the connection work as a service.
    """

    ServiceContext.init_service(*args, **kwargs)


def get_service_addon_name():
    """Name of addon which initialized service connection.

    Service context must be initialized to be able to use this function. Call
    'init_service' on your service start to do so.

    Returns:
        Union[str, None]: Name of addon or None.
    """

    return ServiceContext.addon_name


def get_service_addon_version():
    """Version of addon which initialized service connection.

    Service context must be initialized to be able to use this function. Call
    'init_service' on your service start to do so.

    Returns:
        Union[str, None]: Version of addon or None.
    """

    return ServiceContext.addon_version


def get_service_name():
    """Name of service.

    Service context must be initialized to be able to use this function. Call
    'init_service' on your service start to do so.

    Returns:
        Union[str, None]: Name of service if service was registered.
    """

    return ServiceContext.service_name


def get_service_addon_settings():
    """Addon settings of service which initialized service.

    Service context must be initialized to be able to use this function. Call
    'init_service' on your service start to do so.

    Returns:
        Dict[str, Any]: Addon settings.

    Raises:
        ValueError: When service was not initialized.
    """

    addon_name = get_service_addon_name()
    addon_version = get_service_addon_version()
    if addon_name is None or addon_version is None:
        raise ValueError("Service is not initialized")
    # 'get_addon_settings' is the module level wrapper defined below
    return get_addon_settings(addon_name, addon_version)
def is_connection_created():
    """Is global connection created.

    Returns:
        bool: True if connection was connected.
    """

    return GlobalContext.is_connection_created()


def create_connection(site_id=None, client_version=None):
    """Create global connection.

    Args:
        site_id (str): Machine site id/name.
        client_version (str): Desktop app version.

    Returns:
        GlobalServerAPI: Created connection.
    """

    return GlobalContext.create_connection(site_id, client_version)


def close_connection():
    """Close global connection if is connected."""

    GlobalContext.close_connection()


def change_token(url, token):
    """Change connection token for url.

    This function can be also used to change url.

    Args:
        url (str): Server url.
        token (str): API key token.
    """

    GlobalContext.change_token(url, token)


def set_environments(url, token):
    """Set global environments for global connection.

    Args:
        url (Union[str, None]): Url to server or None to unset environments.
        token (Union[str, None]): API key token to be used for connection.
    """

    GlobalServerAPI.set_environments(url, token)


def get_server_api_connection():
    """Access to global scope object of GlobalServerAPI.

    This access expect to have set environment variables 'AYON_SERVER_URL'
    and 'AYON_TOKEN'.

    Returns:
        GlobalServerAPI: Object of connection to server.
    """

    return GlobalContext.get_server_api_connection()
def get_site_id():
    """Site id of the current global connection.

    Returns:
        Union[str, None]: Site id used in connection.
    """

    con = get_server_api_connection()
    return con.get_site_id()


def set_site_id(site_id):
    """Set site id of already connected client connection.

    Site id is human-readable machine id used in AYON desktop application.

    Args:
        site_id (Union[str, None]): Site id used in connection.
    """

    con = get_server_api_connection()
    con.set_site_id(site_id)


def get_client_version():
    """Version of client used to connect to server.

    Client version is AYON client build desktop application.

    Returns:
        str: Client version string used in connection.
    """

    con = get_server_api_connection()
    return con.get_client_version()


def set_client_version(client_version):
    """Set version of already connected client connection.

    Client version is version of AYON desktop application.

    Args:
        client_version (Union[str, None]): Client version string.
    """

    con = get_server_api_connection()
    con.set_client_version(client_version)
def get_default_settings_variant():
    """Default variant used for settings.

    Returns:
        Union[str, None]: name of variant or None.
    """

    con = get_server_api_connection()
    # Fix: previously returned 'con.get_client_version()' — a copy/paste
    # error from the function above; forward to the matching method.
    return con.get_default_settings_variant()
def set_default_settings_variant(variant):
    """Change default variant for addon settings.

    Note:
        It is recommended to set only 'production' or 'staging' variants
        as default variant.

    Args:
        variant (Union[str, None]): Settings variant name.
    """

    con = get_server_api_connection()
    return con.set_default_settings_variant(variant)


def get_base_url():
    """Base url of server from the global connection.

    Returns:
        str: Server base url.
    """

    con = get_server_api_connection()
    return con.get_base_url()


def get_rest_url():
    """Url of the server REST endpoint root.

    Returns:
        str: REST endpoint url.
    """

    con = get_server_api_connection()
    return con.get_rest_url()
# ---------------------------------------------------------------------------
# Thin module level wrappers over raw REST calls. Each function forwards its
# arguments to the method of the same name on the singleton connection from
# 'get_server_api_connection' (created lazily on first use).
# ---------------------------------------------------------------------------
def raw_get(*args, **kwargs):
    con = get_server_api_connection()
    return con.raw_get(*args, **kwargs)


def raw_post(*args, **kwargs):
    con = get_server_api_connection()
    return con.raw_post(*args, **kwargs)


def raw_put(*args, **kwargs):
    con = get_server_api_connection()
    return con.raw_put(*args, **kwargs)


def raw_patch(*args, **kwargs):
    con = get_server_api_connection()
    return con.raw_patch(*args, **kwargs)


def raw_delete(*args, **kwargs):
    con = get_server_api_connection()
    return con.raw_delete(*args, **kwargs)


def get(*args, **kwargs):
    con = get_server_api_connection()
    return con.get(*args, **kwargs)


def post(*args, **kwargs):
    con = get_server_api_connection()
    return con.post(*args, **kwargs)


def put(*args, **kwargs):
    con = get_server_api_connection()
    return con.put(*args, **kwargs)


def patch(*args, **kwargs):
    con = get_server_api_connection()
    return con.patch(*args, **kwargs)


def delete(*args, **kwargs):
    con = get_server_api_connection()
    return con.delete(*args, **kwargs)
# ---------------------------------------------------------------------------
# Wrappers for events, file transfers, graphql, users and addon/dependency
# packages — all forwarded to the singleton connection.
# ---------------------------------------------------------------------------
def get_event(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_event(*args, **kwargs)


def get_events(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_events(*args, **kwargs)


def dispatch_event(*args, **kwargs):
    con = get_server_api_connection()
    return con.dispatch_event(*args, **kwargs)


def update_event(*args, **kwargs):
    con = get_server_api_connection()
    return con.update_event(*args, **kwargs)


def enroll_event_job(*args, **kwargs):
    con = get_server_api_connection()
    return con.enroll_event_job(*args, **kwargs)


def download_file(*args, **kwargs):
    con = get_server_api_connection()
    return con.download_file(*args, **kwargs)


def upload_file(*args, **kwargs):
    con = get_server_api_connection()
    return con.upload_file(*args, **kwargs)


def query_graphql(*args, **kwargs):
    con = get_server_api_connection()
    return con.query_graphql(*args, **kwargs)


def get_users(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_users(*args, **kwargs)


def get_user(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_user(*args, **kwargs)


def get_attributes_for_type(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_attributes_for_type(*args, **kwargs)


def get_addons_info(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_addons_info(*args, **kwargs)


def download_addon_private_file(*args, **kwargs):
    con = get_server_api_connection()
    return con.download_addon_private_file(*args, **kwargs)


def get_dependencies_info(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_dependencies_info(*args, **kwargs)


def update_dependency_info(*args, **kwargs):
    con = get_server_api_connection()
    return con.update_dependency_info(*args, **kwargs)


def download_dependency_package(*args, **kwargs):
    con = get_server_api_connection()
    return con.download_dependency_package(*args, **kwargs)


def upload_dependency_package(*args, **kwargs):
    con = get_server_api_connection()
    return con.upload_dependency_package(*args, **kwargs)


def delete_dependency_package(*args, **kwargs):
    con = get_server_api_connection()
    return con.delete_dependency_package(*args, **kwargs)
# ---------------------------------------------------------------------------
# Wrappers for anatomy presets, project roots and settings — forwarded to the
# singleton connection.
# ---------------------------------------------------------------------------
def get_project_anatomy_presets(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_project_anatomy_presets(*args, **kwargs)


def get_project_anatomy_preset(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_project_anatomy_preset(*args, **kwargs)


def get_project_roots_by_site(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_project_roots_by_site(*args, **kwargs)


def get_project_roots_for_site(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_project_roots_for_site(*args, **kwargs)


def get_addon_settings_schema(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_addon_settings_schema(*args, **kwargs)


def get_addon_site_settings_schema(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_addon_site_settings_schema(*args, **kwargs)


def get_addon_studio_settings(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_addon_studio_settings(*args, **kwargs)


def get_addon_project_settings(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_addon_project_settings(*args, **kwargs)


def get_addon_settings(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_addon_settings(*args, **kwargs)


# NOTE(review): unlike its siblings this wrapper is not re-exported from the
# package '__init__.py' — confirm whether that is intentional.
def get_addon_site_settings(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_addon_site_settings(*args, **kwargs)


def get_addons_studio_settings(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_addons_studio_settings(*args, **kwargs)


def get_addons_project_settings(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_addons_project_settings(*args, **kwargs)


def get_addons_settings(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_addons_settings(*args, **kwargs)
# ---------------------------------------------------------------------------
# Entity query wrappers (projects, folders, tasks, subsets, versions and
# representations) — forwarded to the singleton connection.
# ---------------------------------------------------------------------------
def get_project(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_project(*args, **kwargs)


def get_projects(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_projects(*args, **kwargs)


def get_folders(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_folders(*args, **kwargs)


def get_tasks(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_tasks(*args, **kwargs)


def get_folder_by_id(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_folder_by_id(*args, **kwargs)


def get_folder_by_path(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_folder_by_path(*args, **kwargs)


def get_folder_by_name(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_folder_by_name(*args, **kwargs)


def get_folder_ids_with_subsets(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_folder_ids_with_subsets(*args, **kwargs)


def get_subsets(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_subsets(*args, **kwargs)


def get_subset_by_id(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_subset_by_id(*args, **kwargs)


def get_subset_by_name(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_subset_by_name(*args, **kwargs)


def get_subset_families(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_subset_families(*args, **kwargs)


def get_versions(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_versions(*args, **kwargs)


def get_version_by_id(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_version_by_id(*args, **kwargs)


def get_version_by_name(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_version_by_name(*args, **kwargs)


def get_hero_version_by_id(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_hero_version_by_id(*args, **kwargs)


def get_hero_version_by_subset_id(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_hero_version_by_subset_id(*args, **kwargs)


def get_hero_versions(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_hero_versions(*args, **kwargs)


def get_last_versions(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_last_versions(*args, **kwargs)


def get_last_version_by_subset_id(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_last_version_by_subset_id(*args, **kwargs)


def get_last_version_by_subset_name(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_last_version_by_subset_name(*args, **kwargs)


def version_is_latest(*args, **kwargs):
    con = get_server_api_connection()
    return con.version_is_latest(*args, **kwargs)


def get_representations(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_representations(*args, **kwargs)


def get_representation_by_id(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_representation_by_id(*args, **kwargs)


def get_representation_by_name(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_representation_by_name(*args, **kwargs)


def get_representation_parents(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_representation_parents(*args, **kwargs)


def get_representations_parents(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_representations_parents(*args, **kwargs)


def get_repre_ids_by_context_filters(*args, **kwargs):
    con = get_server_api_connection()
    return con.get_repre_ids_by_context_filters(*args, **kwargs)
def create_project(
    project_name,
    project_code,
    library_project=False,
    preset_name=None
):
    """Create project on server.

    Args:
        project_name (str): Name of new project.
        project_code (str): Code of new project.
        library_project (bool): Create the project as library project.
        preset_name (Union[str, None]): Name of anatomy preset to apply
            (optional).
    """

    con = get_server_api_connection()
    return con.create_project(
        project_name,
        project_code,
        library_project,
        preset_name
    )


def delete_project(project_name):
    """Delete project from server.

    Args:
        project_name (str): Name of project to delete.
    """

    con = get_server_api_connection()
    return con.delete_project(project_name)
def create_thumbnail(project_name, src_filepath):
    """Upload local file content as thumbnail and receive its id."""

    con = get_server_api_connection()
    return con.create_thumbnail(project_name, src_filepath)


def get_thumbnail(project_name, entity_type, entity_id, thumbnail_id=None):
    """Receive thumbnail for entity.

    Fix: the connection call result was previously discarded (missing
    'return'), so this wrapper always returned 'None'.
    """

    con = get_server_api_connection()
    return con.get_thumbnail(project_name, entity_type, entity_id, thumbnail_id)


def get_folder_thumbnail(project_name, folder_id, thumbnail_id=None):
    """Receive thumbnail for folder."""

    con = get_server_api_connection()
    return con.get_folder_thumbnail(project_name, folder_id, thumbnail_id)


def get_version_thumbnail(project_name, version_id, thumbnail_id=None):
    """Receive thumbnail for version."""

    con = get_server_api_connection()
    return con.get_version_thumbnail(project_name, version_id, thumbnail_id)


def get_workfile_thumbnail(project_name, workfile_id, thumbnail_id=None):
    """Receive thumbnail for workfile."""

    con = get_server_api_connection()
    return con.get_workfile_thumbnail(project_name, workfile_id, thumbnail_id)


# NOTE: A second, byte-identical definition of 'create_thumbnail' used to
# follow here; it only shadowed the first one and was removed.


def get_default_fields_for_type(entity_type):
    """Default fields queried for an entity type."""

    con = get_server_api_connection()
    return con.get_default_fields_for_type(entity_type)

View file

@ -0,0 +1,105 @@
# Environment variables used to initialize the global server connection
SERVER_URL_ENV_KEY = "AYON_SERVER_URL"
SERVER_TOKEN_ENV_KEY = "AYON_TOKEN"

# Default sets of fields queried for each entity type when a caller does not
# pass explicit fields.
# --- Project ---
DEFAULT_PROJECT_FIELDS = {
    "active",
    "name",
    "code",
    "config",
    "createdAt",
}

# --- Folders ---
DEFAULT_FOLDER_FIELDS = {
    "id",
    "name",
    "label",
    "folderType",
    "path",
    "parentId",
    "active",
    "thumbnailId",
}

# --- Tasks ---
DEFAULT_TASK_FIELDS = {
    "id",
    "name",
    "label",
    "taskType",
    "folderId",
    "active",
    "assignees",
}

# --- Subsets ---
DEFAULT_SUBSET_FIELDS = {
    "id",
    "name",
    "folderId",
    "active",
    "family",
}

# --- Versions ---
DEFAULT_VERSION_FIELDS = {
    "id",
    "name",
    "version",
    "subsetId",
    "taskId",
    "active",
    "author",
    "thumbnailId",
    "createdAt",
    "updatedAt",
}

# --- Representations ---
DEFAULT_REPRESENTATION_FIELDS = {
    "id",
    "name",
    "context",
    "createdAt",
    "active",
    "versionId",
}

# Fields of files attached to a representation (dot-separated sub-fields)
REPRESENTATION_FILES_FIELDS = {
    "files.name",
    "files.hash",
    "files.id",
    "files.path",
    "files.size",
}

# --- Workfile info ---
DEFAULT_WORKFILE_INFO_FIELDS = {
    "active",
    "createdAt",
    "createdBy",
    "id",
    "name",
    "path",
    "projectName",
    "taskId",
    "thumbnailId",
    "updatedAt",
    "updatedBy",
}

# --- Events ---
DEFAULT_EVENT_FIELDS = {
    "id",
    "hash",
    "createdAt",
    "dependsOn",
    "description",
    "project",
    "retries",
    "sender",
    "status",
    "topic",
    "updatedAt",
    "user",
}

File diff suppressed because it is too large. (Load diff)

View file

@ -0,0 +1,52 @@
import copy
class ServerEvent(object):
    """Wrapper around event data dispatched to server.

    Args:
        topic (str): Event topic.
        sender (Optional[str]): Identifier of sender.
        event_hash (Optional[str]): Hash of event.
        project_name (Optional[str]): Project the event belongs to.
        username (Optional[str]): User related to the event.
        dependencies (Optional[list]): Events this event depends on.
        description (Optional[str]): Human readable description.
        summary (Optional[dict]): Summary data of event.
        payload (Optional[dict]): Full payload of event.
        finished (bool): Event is in finished state.
        store (bool): Event should be stored.
    """

    def __init__(
        self,
        topic,
        sender=None,
        event_hash=None,
        project_name=None,
        username=None,
        dependencies=None,
        description=None,
        summary=None,
        payload=None,
        finished=True,
        store=True,
    ):
        # Avoid mutable default arguments
        if dependencies is None:
            dependencies = []
        if payload is None:
            payload = {}
        if summary is None:
            summary = {}

        self.topic = topic
        self.sender = sender
        self.event_hash = event_hash
        self.project_name = project_name
        self.username = username
        self.dependencies = dependencies
        self.description = description
        self.summary = summary
        self.payload = payload
        self.finished = finished
        self.store = store

    def to_data(self):
        """Convert event into a json-serializable dictionary.

        Returns:
            dict: Event data in the key structure expected by server.
        """

        # Fix: the dict literal previously contained the "description" key
        # twice; the duplicate was removed (value was identical, so behavior
        # is unchanged).
        # NOTE(review): 'payload' is returned by reference while
        # 'dependencies' and 'summary' are deep-copied — confirm whether the
        # asymmetry is intentional.
        return {
            "topic": self.topic,
            "sender": self.sender,
            "hash": self.event_hash,
            "project": self.project_name,
            "user": self.username,
            "dependencies": copy.deepcopy(self.dependencies),
            "description": self.description,
            "summary": copy.deepcopy(self.summary),
            "payload": self.payload,
            "finished": self.finished,
            "store": self.store
        }

View file

@ -0,0 +1,97 @@
import copy
class UrlError(Exception):
    """Raised when a value cannot be parsed as a server url.

    The exception may carry hints with possible fixes of the url that a UI
    can present to the user.
    """

    def __init__(self, message, title, hints=None):
        super(UrlError, self).__init__(message)
        self.title = title
        self.hints = [] if hints is None else hints
class ServerError(Exception):
    """Base class of errors related to server communication."""
    pass


class UnauthorizedError(ServerError):
    """Request was not authorized by server."""
    pass


class AuthenticationError(ServerError):
    """Authentication against server failed."""
    pass


class ServerNotReached(ServerError):
    """Server could not be reached."""
    pass
class GraphQlQueryFailed(Exception):
    """Raised when a GraphQl response reports errors.

    The combined error message contains each error's message with its item
    path and source locations when available.
    """

    def __init__(self, errors, query, variables):
        self.errors = errors
        self.query = query
        self.variables = (
            copy.deepcopy(variables) if variables is not None else {}
        )

        details = []
        for error in errors:
            detail = error["message"]

            path = error.get("path")
            if path:
                detail += " on item '{}'".format("/".join(path))

            locations = error.get("locations")
            if locations:
                joined_locations = " and ".join(
                    "Line {} Column {}".format(
                        location["line"], location["column"]
                    )
                    for location in locations
                )
                detail += " ({})".format(joined_locations)
            details.append(detail)

        message = "GraphQl query Failed"
        if details:
            message = "{}: {}".format(message, " | ".join(details))
        super(GraphQlQueryFailed, self).__init__(message)
class MissingEntityError(Exception):
    """Base error for lookups of entities that do not exist."""
    pass


class ProjectNotFound(MissingEntityError):
    """Queried project was not found."""

    def __init__(self, project_name, message=None):
        self.project_name = project_name
        if not message:
            message = "Project \"{}\" was not found".format(project_name)
        super(ProjectNotFound, self).__init__(message)


class FolderNotFound(MissingEntityError):
    """Queried folder was not found in project."""

    def __init__(self, project_name, folder_id, message=None):
        self.project_name = project_name
        self.folder_id = folder_id
        if not message:
            message = (
                "Folder with id \"{}\" was not found in project \"{}\""
            ).format(folder_id, project_name)
        super(FolderNotFound, self).__init__(message)


class FailedOperations(Exception):
    """Raised when an operations request fails."""
    pass


class FailedServiceInit(Exception):
    """Raised when the service connection could not be initialized."""
    pass

View file

@ -0,0 +1,896 @@
import copy
import numbers
from abc import ABCMeta, abstractproperty, abstractmethod
import six
from .exceptions import GraphQlQueryFailed
# Sentinel marking a leaf field in the nested field tree
FIELD_VALUE = object()


def fields_to_dict(fields):
    """Convert dot separated field names into a nested dictionary.

    Leaf fields are marked with the module level 'FIELD_VALUE' sentinel.
    A field that is marked as leaf wins over deeper sub-fields added later.

    Args:
        fields (Iterable[str]): Field names, possibly dot separated.

    Returns:
        Union[dict, None]: Nested field tree, or None for empty input.
    """

    if not fields:
        return None

    output = {}
    for field in fields:
        parts = field.split(".")
        leaf = parts.pop(-1)

        target = output
        for part in parts:
            # Stop descending when a parent was already marked as leaf
            if target is FIELD_VALUE:
                break
            target = target.setdefault(part, {})

        if target is not FIELD_VALUE:
            target[leaf] = FIELD_VALUE
    return output
class QueryVariable(object):
    """Object representing single variable used in GraphQlQuery.

    The variable is declared in the GraphQl query header while its '$'
    prefixed reference is used inside field filters.

    Args:
        variable_name (str): Name of variable in query.
    """

    def __init__(self, variable_name):
        self._variable_name = variable_name
        self._name = "${}".format(variable_name)

    @property
    def name(self):
        """str: Reference used in field filters ('$' prefixed)."""

        return self._name

    @property
    def variable_name(self):
        """str: Plain name used in the query definition."""

        return self._variable_name

    def __hash__(self):
        return hash(self._name)

    def __str__(self):
        return self._name

    def __format__(self, *args, **kwargs):
        return format(self._name, *args, **kwargs)
class GraphQlQuery:
    """GraphQl query which can have fields to query.

    Single use object which can be used only for one query. Object and children
    objects keep track about paging and progress.

    Args:
        name (str): Name of query.
    """

    # Indentation step (spaces) used by children when building query string
    offset = 2

    def __init__(self, name):
        self._name = name
        # Variable info by name: {"type": ..., "variable": ..., "value": ...}
        self._variables = {}
        # Top level field objects added via 'add_field'/'add_obj_field'
        self._children = []
        # Lazy cache for 'has_multiple_edge_fields'
        self._has_multiple_edge_fields = None
    @property
    def indent(self):
        """Indentation for preparation of query string.

        Returns:
            int: Ident spaces.
        """

        return 0

    @property
    def child_indent(self):
        """Indentation for preparation of query string used by children.

        Returns:
            int: Ident spaces for children.
        """

        return self.indent

    @property
    def need_query(self):
        """Still need query from server.

        Needed for edges which use pagination.

        Returns:
            bool: If still need query from server.
        """

        # True while any child field still has pages to fetch
        for child in self._children:
            if child.need_query:
                return True
        return False
    @property
    def has_multiple_edge_fields(self):
        """Query contains more than one edge (paginated) field.

        Returns:
            bool: More than one edge field is present.
        """

        if self._has_multiple_edge_fields is None:
            # Counting can stop as soon as a second edge field is found
            edge_counter = 0
            for child in self._children:
                edge_counter += child.sum_edge_fields(2)
                if edge_counter > 1:
                    break
            self._has_multiple_edge_fields = edge_counter > 1

        return self._has_multiple_edge_fields
    def add_variable(self, key, value_type, value=None):
        """Add variable to query.

        Args:
            key (str): Variable name.
            value_type (str): Type of expected value in variables. This is
                graphql type e.g. "[String!]", "Int", "Boolean", etc.
            value (Any): Default value for variable. Can be changed later.

        Returns:
            QueryVariable: Created variable object.

        Raises:
            KeyError: If variable was already added before.
        """

        if key in self._variables:
            raise KeyError(
                "Variable \"{}\" was already set with type {}.".format(
                    key, value_type
                )
            )

        variable = QueryVariable(key)
        self._variables[key] = {
            "type": value_type,
            "variable": variable,
            "value": value
        }
        return variable
    def get_variable(self, key):
        """Variable object.

        Args:
            key (str): Variable name added to headers.

        Returns:
            QueryVariable: Variable object used in query string.
        """

        return self._variables[key]["variable"]

    def get_variable_value(self, key, default=None):
        """Get current value of variable.

        Args:
            key (str): Variable name.
            default (Any): Default value if variable is available.

        Returns:
            Any: Variable value.
        """

        variable_item = self._variables.get(key)
        if variable_item:
            return variable_item["value"]
        return default

    def set_variable_value(self, key, value):
        """Set value for variable.

        Args:
            key (str): Variable name under which the value is stored.
            value (Any): Variable value used in query. Variable is not used
                if value is 'None'.
        """

        self._variables[key]["value"] = value
def get_variables_values(self):
"""Calculate variable values used that should be used in query.
Variables with value set to 'None' are skipped.
Returns:
Dict[str, Any]: Variable values by their name.
"""
output = {}
for key, item in self._variables.items():
value = item["value"]
if value is not None:
output[key] = item["value"]
return output
def add_obj_field(self, field):
"""Add field object to children.
Args:
field (BaseGraphQlQueryField): Add field to query children.
"""
if field in self._children:
return
self._children.append(field)
field.set_parent(self)
def add_field(self, name, has_edges=None):
"""Add field to query.
Args:
name (str): Field name e.g. 'id'.
has_edges (bool): Field has edges so it need paging.
Returns:
BaseGraphQlQueryField: Created field object.
"""
if has_edges:
item = GraphQlQueryEdgeField(name, self)
else:
item = GraphQlQueryField(name, self)
self.add_obj_field(item)
return item
def calculate_query(self):
"""Calculate query string which is sent to server.
Returns:
str: GraphQl string with variables and headers.
Raises:
ValueError: Query has no fiels.
"""
if not self._children:
raise ValueError("Missing fields to query")
variables = []
for item in self._variables.values():
if item["value"] is None:
continue
variables.append(
"{}: {}".format(item["variable"], item["type"])
)
variables_str = ""
if variables:
variables_str = "({})".format(",".join(variables))
header = "query {}{}".format(self._name, variables_str)
output = []
output.append(header + " {")
for field in self._children:
output.append(field.calculate_query())
output.append("}")
return "\n".join(output)
def parse_result(self, data, output, progress_data):
"""Parse data from response for output.
Output is stored to passed 'output' variable. That's because of paging
during which objects must have access to both new and previous values.
Args:
data (Dict[str, Any]): Data received using calculated query.
output (Dict[str, Any]): Where parsed data are stored.
"""
if not data:
return
for child in self._children:
child.parse_result(data, output, progress_data)
def query(self, con):
"""Do a query from server.
Args:
con (ServerAPI): Connection to server with 'query' method.
Returns:
Dict[str, Any]: Parsed output from GraphQl query.
"""
progress_data = {}
output = {}
while self.need_query:
query_str = self.calculate_query()
variables = self.get_variables_values()
response = con.query_graphql(
query_str,
self.get_variables_values()
)
if response.errors:
raise GraphQlQueryFailed(response.errors, query_str, variables)
self.parse_result(response.data["data"], output, progress_data)
return output
def continuous_query(self, con):
"""Do a query from server.
Args:
con (ServerAPI): Connection to server with 'query' method.
Returns:
Dict[str, Any]: Parsed output from GraphQl query.
"""
progress_data = {}
if self.has_multiple_edge_fields:
output = {}
while self.need_query:
query_str = self.calculate_query()
variables = self.get_variables_values()
response = con.query_graphql(query_str, variables)
if response.errors:
raise GraphQlQueryFailed(
response.errors, query_str, variables
)
self.parse_result(response.data["data"], output, progress_data)
yield output
else:
while self.need_query:
output = {}
query_str = self.calculate_query()
variables = self.get_variables_values()
response = con.query_graphql(query_str, variables)
if response.errors:
raise GraphQlQueryFailed(
response.errors, query_str, variables
)
self.parse_result(response.data["data"], output, progress_data)
yield output
@six.add_metaclass(ABCMeta)
class BaseGraphQlQueryField(object):
    """Field in GraphQl query.

    Args:
        name (str): Name of field.
        parent (Union[BaseGraphQlQueryField, GraphQlQuery]): Parent object of a
            field.
    """
    def __init__(self, name, parent):
        # Resolve root query object from the parent chain
        if isinstance(parent, GraphQlQuery):
            query_item = parent
        else:
            query_item = parent.query_item
        self._name = name
        self._parent = parent
        self._filters = {}
        self._children = []
        # Value is changed on first parse of result
        self._need_query = True
        self._query_item = query_item
        # Lazily computed cache for 'path' property
        self._path = None

    def __repr__(self):
        return "<{} {}>".format(self.__class__.__name__, self.path)

    @property
    def need_query(self):
        """Still need query from server.

        Needed for edges which use pagination. Look into children values too.

        Returns:
            bool: If still need query from server.
        """
        if self._need_query:
            return True
        for child in self._children:
            if child.need_query:
                return True
        return False

    def sum_edge_fields(self, max_limit=None):
        """Check how many edge fields query has.

        In case there are multiple edge fields or are nested the query can't
        yield mid cursor results.

        Args:
            max_limit (int): Skip rest of counting if counter is bigger than
                entered number.

        Returns:
            int: Counted edge fields.
        """
        counter = 0
        if isinstance(self, GraphQlQueryEdgeField):
            counter = 1
        for child in self._children:
            counter += child.sum_edge_fields(max_limit)
            if max_limit is not None and counter >= max_limit:
                break
        return counter

    @property
    def offset(self):
        # Indentation step size is defined on root query object
        return self._query_item.offset

    @property
    def indent(self):
        # Indentation of this field in the final query string
        return self._parent.child_indent + self.offset

    @abstractproperty
    def child_indent(self):
        """Indentation used by child fields of this field."""
        pass

    @property
    def query_item(self):
        # Root 'GraphQlQuery' object this field belongs to
        return self._query_item

    @abstractproperty
    def has_edges(self):
        """Field is an edge field which uses pagination."""
        pass

    @property
    def child_has_edges(self):
        """Any (nested) child field is an edge field.

        Returns:
            bool: True when pagination happens below this field.
        """
        for child in self._children:
            if child.has_edges or child.child_has_edges:
                return True
        return False

    @property
    def path(self):
        """Field path for debugging purposes.

        Returns:
            str: Field path in query.
        """
        if self._path is None:
            if isinstance(self._parent, GraphQlQuery):
                path = self._name
            else:
                path = "/".join((self._parent.path, self._name))
            self._path = path
        return self._path

    def reset_cursor(self):
        """Reset pagination cursors recursively (no-op for plain fields)."""
        for child in self._children:
            child.reset_cursor()

    def get_variable_value(self, *args, **kwargs):
        # Delegated to root query object which owns the variables
        return self._query_item.get_variable_value(*args, **kwargs)

    def set_variable_value(self, *args, **kwargs):
        # Delegated to root query object which owns the variables
        return self._query_item.set_variable_value(*args, **kwargs)

    def set_filter(self, key, value):
        """Set filter (GraphQl argument) applied to this field."""
        self._filters[key] = value

    def has_filter(self, key):
        """Check if filter is set under key."""
        return key in self._filters

    def remove_filter(self, key):
        """Remove filter by key (missing key is ignored)."""
        self._filters.pop(key, None)

    def set_parent(self, parent):
        """Change parent and register self as parent's child field."""
        if self._parent is parent:
            return
        self._parent = parent
        parent.add_obj_field(self)

    def add_obj_field(self, field):
        """Add existing field object as a child of this field."""
        if field in self._children:
            return
        self._children.append(field)
        field.set_parent(self)

    def add_field(self, name, has_edges=None):
        """Create and add a child field by name.

        Args:
            name (str): Field name e.g. 'id'.
            has_edges (bool): Field has edges so it needs paging.

        Returns:
            BaseGraphQlQueryField: Created field object.
        """
        if has_edges:
            item = GraphQlQueryEdgeField(name, self)
        else:
            item = GraphQlQueryField(name, self)
        self.add_obj_field(item)
        return item

    def _filter_value_to_str(self, value):
        """Convert filter value into a GraphQl argument string.

        Returns:
            Union[str, None]: None when value is a query variable without
                a value set (filter is then skipped).

        Raises:
            TypeError: For unsupported value types.
        """
        if isinstance(value, QueryVariable):
            # Unset variables are skipped from the query string
            if self.get_variable_value(value.variable_name) is None:
                return None
            return str(value)
        if isinstance(value, numbers.Number):
            return str(value)
        if isinstance(value, six.string_types):
            return '"{}"'.format(value)
        if isinstance(value, (list, set, tuple)):
            return "[{}]".format(
                ", ".join(
                    self._filter_value_to_str(item)
                    for item in iter(value)
                )
            )
        raise TypeError(
            "Unknown type to convert '{}'".format(str(type(value)))
        )

    def get_filters(self):
        """Receive filters for item.

        By default just use copy of set filters.

        Returns:
            Dict[str, Any]: Fields filters.
        """
        return copy.deepcopy(self._filters)

    def _filters_to_string(self):
        # Convert filters to arguments string e.g. '(ids: ["a"], first: 300)'
        filters = self.get_filters()
        if not filters:
            return ""
        filter_items = []
        for key, value in filters.items():
            string_value = self._filter_value_to_str(value)
            if string_value is None:
                continue
            filter_items.append("{}: {}".format(key, string_value))
        if not filter_items:
            return ""
        return "({})".format(", ".join(filter_items))

    def _fake_children_parse(self):
        """Mark children as they don't need query."""
        for child in self._children:
            child.parse_result({}, {}, {})

    @abstractmethod
    def calculate_query(self):
        """Calculate query string of this field and its children."""
        pass

    @abstractmethod
    def parse_result(self, data, output, progress_data):
        """Parse server response of this field into 'output'."""
        pass
class GraphQlQueryField(BaseGraphQlQueryField):
    """Plain (non-paginated) field in GraphQl query."""
    has_edges = False

    @property
    def child_indent(self):
        return self.indent

    def parse_result(self, data, output, progress_data):
        """Parse value of this field from 'data' into 'output'.

        Handles explicit 'None' values, plain leaf values, nested objects
        and lists of nested objects. Existing values in 'output' are updated
        in place so results from multiple pages can be merged.

        Args:
            data (Dict[str, Any]): Parent data containing this field's value.
            output (Dict[str, Any]): Output dictionary to fill.
            progress_data (Dict[str, Any]): Pagination state shared across
                fields (used by nested edge fields).

        Raises:
            TypeError: When 'data' is not a dictionary.
        """
        if not isinstance(data, dict):
            raise TypeError("{} Expected 'dict' type got '{}'".format(
                self._name, str(type(data))
            ))
        self._need_query = False
        value = data.get(self._name)
        if value is None:
            self._fake_children_parse()
            # Keep explicit 'None' when the key was present in data
            if self._name in data:
                output[self._name] = None
            return
        if not self._children:
            # Leaf field -> store value as is
            output[self._name] = value
            return
        output_value = output.get(self._name)
        if isinstance(value, dict):
            # Nested object -> merge children into (possibly existing) dict
            if output_value is None:
                output_value = {}
                output[self._name] = output_value
            for child in self._children:
                child.parse_result(value, output_value, progress_data)
            return
        # List of nested objects -> merge children per item by index
        if output_value is None:
            output_value = []
            output[self._name] = output_value
        if not value:
            self._fake_children_parse()
            return
        # Extend the output list to the length of the received list
        diff = len(value) - len(output_value)
        if diff > 0:
            for _ in range(diff):
                output_value.append({})
        for idx, item in enumerate(value):
            item_value = output_value[idx]
            for child in self._children:
                child.parse_result(item, item_value, progress_data)

    def calculate_query(self):
        """Calculate query string of this field and its children.

        Returns:
            str: Indented GraphQl representation of the field.
        """
        offset = self.indent * " "
        header = "{}{}{}".format(
            offset,
            self._name,
            self._filters_to_string()
        )
        if not self._children:
            return header
        output = []
        output.append(header + " {")
        output.extend([
            field.calculate_query()
            for field in self._children
        ])
        output.append(offset + "}")
        return "\n".join(output)
class GraphQlQueryEdgeField(BaseGraphQlQueryField):
    """Paginated field using GraphQl 'edges'/'node' connection layout."""
    has_edges = True

    def __init__(self, *args, **kwargs):
        super(GraphQlQueryEdgeField, self).__init__(*args, **kwargs)
        # Cursor of last finished page (used as 'after' filter)
        self._cursor = None

    @property
    def child_indent(self):
        # Children are nested under 'edges { node {' -> two extra levels
        offset = self.offset * 2
        return self.indent + offset

    def reset_cursor(self):
        """Reset pagination so the field is queried from the first page."""
        # Reset cursor only for edges
        self._cursor = None
        self._need_query = True
        super(GraphQlQueryEdgeField, self).reset_cursor()

    def parse_result(self, data, output, progress_data):
        """Parse one page of edges into 'output'.

        Merges nodes across pages. When children also use pagination, nodes
        are tracked by edge cursor in 'progress_data' so repeated queries
        keep updating the same node dictionaries.

        Args:
            data (Dict[str, Any]): Parent data containing this field's value.
            output (Dict[str, Any]): Output dictionary to fill.
            progress_data (Dict[str, Any]): Shared pagination state.

        Raises:
            TypeError: When 'data' is not a dictionary.
        """
        if not isinstance(data, dict):
            raise TypeError("{} Expected 'dict' type got '{}'".format(
                self._name, str(type(data))
            ))
        value = data.get(self._name)
        if value is None:
            self._fake_children_parse()
            self._need_query = False
            return
        if self._name in output:
            node_values = output[self._name]
        else:
            node_values = []
            output[self._name] = node_values
        # Cursors must be tracked when children are paginated too
        handle_cursors = self.child_has_edges
        if handle_cursors:
            cursor_key = self._get_cursor_key()
            if cursor_key in progress_data:
                nodes_by_cursor = progress_data[cursor_key]
            else:
                nodes_by_cursor = {}
                progress_data[cursor_key] = nodes_by_cursor
        page_info = value["pageInfo"]
        new_cursor = page_info["endCursor"]
        self._need_query = page_info["hasNextPage"]
        edges = value["edges"]
        # Fake result parse
        if not edges:
            self._fake_children_parse()
        for edge in edges:
            if not handle_cursors:
                edge_value = {}
                node_values.append(edge_value)
            else:
                edge_cursor = edge["cursor"]
                edge_value = nodes_by_cursor.get(edge_cursor)
                if edge_value is None:
                    edge_value = {}
                    nodes_by_cursor[edge_cursor] = edge_value
                    node_values.append(edge_value)
            for child in self._children:
                child.parse_result(edge["node"], edge_value, progress_data)
        if not self._need_query:
            return
        # Move to the next page only after all children finished their own
        # pagination; children are then reset to restart on the new page
        change_cursor = True
        for child in self._children:
            if child.need_query:
                change_cursor = False
        if change_cursor:
            for child in self._children:
                child.reset_cursor()
            self._cursor = new_cursor

    def _get_cursor_key(self):
        # Key under which cursor-tracked nodes are stored in progress data
        return "{}/__cursor__".format(self.path)

    def get_filters(self):
        """Filters with pagination arguments added.

        Returns:
            Dict[str, Any]: Filters including page size ('first') and
                optional page cursor ('after').
        """
        filters = super(GraphQlQueryEdgeField, self).get_filters()
        # Fixed page size per request
        filters["first"] = 300
        if self._cursor:
            filters["after"] = self._cursor
        return filters

    def calculate_query(self):
        """Calculate query string with 'edges'/'node'/'pageInfo' wrappers.

        Returns:
            str: Indented GraphQl representation of the edge field.

        Raises:
            ValueError: When the edge field has no child fields.
        """
        if not self._children:
            raise ValueError("Missing child definitions for edges {}".format(
                self.path
            ))
        offset = self.indent * " "
        header = "{}{}{}".format(
            offset,
            self._name,
            self._filters_to_string()
        )
        output = []
        output.append(header + " {")
        edges_offset = offset + self.offset * " "
        node_offset = edges_offset + self.offset * " "
        output.append(edges_offset + "edges {")
        output.append(node_offset + "node {")
        for field in self._children:
            output.append(
                field.calculate_query()
            )
        output.append(node_offset + "}")
        # Cursor is queried only when needed to track nested pagination
        if self.child_has_edges:
            output.append(node_offset + "cursor")
        output.append(edges_offset + "}")
        # Add page information
        output.append(edges_offset + "pageInfo {")
        for page_key in (
            "endCursor",
            "hasNextPage",
        ):
            output.append(node_offset + page_key)
        output.append(edges_offset + "}")
        output.append(offset + "}")
        return "\n".join(output)
INTROSPECTION_QUERY = """
query IntrospectionQuery {
__schema {
queryType { name }
mutationType { name }
subscriptionType { name }
types {
...FullType
}
directives {
name
description
locations
args {
...InputValue
}
}
}
}
fragment FullType on __Type {
kind
name
description
fields(includeDeprecated: true) {
name
description
args {
...InputValue
}
type {
...TypeRef
}
isDeprecated
deprecationReason
}
inputFields {
...InputValue
}
interfaces {
...TypeRef
}
enumValues(includeDeprecated: true) {
name
description
isDeprecated
deprecationReason
}
possibleTypes {
...TypeRef
}
}
fragment InputValue on __InputValue {
name
description
type { ...TypeRef }
defaultValue
}
fragment TypeRef on __Type {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
}
}
}
}
}
}
}
}
"""

View file

@ -0,0 +1,362 @@
import collections
from .graphql import FIELD_VALUE, GraphQlQuery
def fields_to_dict(fields):
    """Convert dot-separated field names into a nested dictionary.

    Leaf keys hold the 'FIELD_VALUE' sentinel. Once a path is marked as a
    leaf, deeper sub-fields of the same path are ignored.

    Args:
        fields (Iterable[str]): Field names, e.g. ["attrib.fps", "name"].

    Returns:
        Union[Dict[str, Any], None]: Nested hierarchy of fields, or None
            when no fields were passed.
    """
    if not fields:
        return None

    root = {}
    for field in fields:
        parts = field.split(".")
        leaf_key = parts.pop()
        node = root
        for parent_key in parts:
            if node is FIELD_VALUE:
                # A shorter field already marked this path as a leaf
                break
            node = node.setdefault(parent_key, {})
        if node is not FIELD_VALUE:
            node[leaf_key] = FIELD_VALUE
    return root
def project_graphql_query(fields):
    """Build GraphQl query to fetch single project.

    Expects 'projectName' variable to be filled before the query is used.

    Args:
        fields (Iterable[str]): Dot-separated field names to query.

    Returns:
        GraphQlQuery: Prepared project query.
    """
    query = GraphQlQuery("ProjectQuery")
    project_name_var = query.add_variable("projectName", "String!")
    project_field = query.add_field("project")
    project_field.set_filter("name", project_name_var)

    # Breadth-first expansion of requested fields under project field
    fields_queue = collections.deque(
        (key, value, project_field)
        for key, value in fields_to_dict(fields).items()
    )
    while fields_queue:
        key, value, parent = fields_queue.popleft()
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue
        fields_queue.extend(
            (sub_key, sub_value, field)
            for sub_key, sub_value in value.items()
        )
    return query
def projects_graphql_query(fields):
    """Build GraphQl query to fetch all projects.

    Args:
        fields (Iterable[str]): Dot-separated field names to query.

    Returns:
        GraphQlQuery: Prepared projects query (paginated).
    """
    query = GraphQlQuery("ProjectsQuery")
    projects_field = query.add_field("projects", has_edges=True)

    # Breadth-first expansion of requested fields under projects field
    fields_queue = collections.deque(
        (key, value, projects_field)
        for key, value in fields_to_dict(fields).items()
    )
    while fields_queue:
        key, value, parent = fields_queue.popleft()
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue
        fields_queue.extend(
            (sub_key, sub_value, field)
            for sub_key, sub_value in value.items()
        )
    return query
def folders_graphql_query(fields):
    """Build GraphQl query to fetch folders of a project.

    Expects 'projectName' variable and optionally folder filters
    ('folderIds', 'parentFolderIds', 'folderPaths', 'folderNames',
    'folderHasSubsets') to be filled before use.

    Args:
        fields (Iterable[str]): Dot-separated field names to query.

    Returns:
        GraphQlQuery: Prepared folders query (paginated).
    """
    query = GraphQlQuery("FoldersQuery")
    project_name_var = query.add_variable("projectName", "String!")
    folder_ids_var = query.add_variable("folderIds", "[String!]")
    parent_folder_ids_var = query.add_variable("parentFolderIds", "[String!]")
    folder_paths_var = query.add_variable("folderPaths", "[String!]")
    folder_names_var = query.add_variable("folderNames", "[String!]")
    has_subsets_var = query.add_variable("folderHasSubsets", "Boolean!")

    project_field = query.add_field("project")
    project_field.set_filter("name", project_name_var)

    folders_field = project_field.add_field("folders", has_edges=True)
    folders_field.set_filter("ids", folder_ids_var)
    folders_field.set_filter("parentIds", parent_folder_ids_var)
    folders_field.set_filter("names", folder_names_var)
    folders_field.set_filter("paths", folder_paths_var)
    folders_field.set_filter("hasSubsets", has_subsets_var)

    # Breadth-first expansion of requested fields under folders field
    fields_queue = collections.deque(
        (key, value, folders_field)
        for key, value in fields_to_dict(fields).items()
    )
    while fields_queue:
        key, value, parent = fields_queue.popleft()
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue
        fields_queue.extend(
            (sub_key, sub_value, field)
            for sub_key, sub_value in value.items()
        )
    return query
def tasks_graphql_query(fields):
    """Build GraphQl query to fetch tasks of a project.

    Expects 'projectName' variable and optionally task filters ('taskIds',
    'taskNames', 'taskTypes', 'folderIds') to be filled before use.

    Args:
        fields (Iterable[str]): Dot-separated field names to query.

    Returns:
        GraphQlQuery: Prepared tasks query (paginated).
    """
    query = GraphQlQuery("TasksQuery")
    project_name_var = query.add_variable("projectName", "String!")
    task_ids_var = query.add_variable("taskIds", "[String!]")
    task_names_var = query.add_variable("taskNames", "[String!]")
    task_types_var = query.add_variable("taskTypes", "[String!]")
    folder_ids_var = query.add_variable("folderIds", "[String!]")

    project_field = query.add_field("project")
    project_field.set_filter("name", project_name_var)

    tasks_field = project_field.add_field("tasks", has_edges=True)
    tasks_field.set_filter("ids", task_ids_var)
    # WARNING: At moment when this been created 'names' filter is not supported
    tasks_field.set_filter("names", task_names_var)
    tasks_field.set_filter("taskTypes", task_types_var)
    tasks_field.set_filter("folderIds", folder_ids_var)

    # Breadth-first expansion of requested fields under tasks field
    fields_queue = collections.deque(
        (key, value, tasks_field)
        for key, value in fields_to_dict(fields).items()
    )
    while fields_queue:
        key, value, parent = fields_queue.popleft()
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue
        fields_queue.extend(
            (sub_key, sub_value, field)
            for sub_key, sub_value in value.items()
        )
    return query
def subsets_graphql_query(fields):
    """Build GraphQl query to fetch subsets of a project.

    Expects 'projectName' variable and optionally subset filters
    ('folderIds', 'subsetIds', 'subsetNames') to be filled before use.

    Args:
        fields (Iterable[str]): Dot-separated field names to query.

    Returns:
        GraphQlQuery: Prepared subsets query (paginated).
    """
    query = GraphQlQuery("SubsetsQuery")
    project_name_var = query.add_variable("projectName", "String!")
    folder_ids_var = query.add_variable("folderIds", "[String!]")
    subset_ids_var = query.add_variable("subsetIds", "[String!]")
    subset_names_var = query.add_variable("subsetNames", "[String!]")

    project_field = query.add_field("project")
    project_field.set_filter("name", project_name_var)

    subsets_field = project_field.add_field("subsets", has_edges=True)
    subsets_field.set_filter("ids", subset_ids_var)
    subsets_field.set_filter("names", subset_names_var)
    subsets_field.set_filter("folderIds", folder_ids_var)

    # Breadth-first expansion of requested (deduplicated) fields
    fields_queue = collections.deque(
        (key, value, subsets_field)
        for key, value in fields_to_dict(set(fields)).items()
    )
    while fields_queue:
        key, value, parent = fields_queue.popleft()
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue
        fields_queue.extend(
            (sub_key, sub_value, field)
            for sub_key, sub_value in value.items()
        )
    return query
def versions_graphql_query(fields):
    """Build GraphQl query to fetch versions of a project.

    Expects 'projectName' variable and optionally version filters
    ('subsetIds', 'versionIds', 'versions', 'heroOnly', 'latestOnly',
    'heroOrLatestOnly') to be filled before use.

    Args:
        fields (Iterable[str]): Dot-separated field names to query.

    Returns:
        GraphQlQuery: Prepared versions query (paginated).
    """
    query = GraphQlQuery("VersionsQuery")
    project_name_var = query.add_variable("projectName", "String!")
    subset_ids_var = query.add_variable("subsetIds", "[String!]")
    version_ids_var = query.add_variable("versionIds", "[String!]")
    versions_var = query.add_variable("versions", "[Int!]")
    hero_only_var = query.add_variable("heroOnly", "Boolean")
    latest_only_var = query.add_variable("latestOnly", "Boolean")
    hero_or_latest_only_var = query.add_variable(
        "heroOrLatestOnly", "Boolean"
    )

    project_field = query.add_field("project")
    project_field.set_filter("name", project_name_var)

    versions_field = project_field.add_field("versions", has_edges=True)
    versions_field.set_filter("ids", version_ids_var)
    versions_field.set_filter("subsetIds", subset_ids_var)
    versions_field.set_filter("versions", versions_var)
    versions_field.set_filter("heroOnly", hero_only_var)
    versions_field.set_filter("latestOnly", latest_only_var)
    versions_field.set_filter("heroOrLatestOnly", hero_or_latest_only_var)

    # Breadth-first expansion of requested (deduplicated) fields
    fields_queue = collections.deque(
        (key, value, versions_field)
        for key, value in fields_to_dict(set(fields)).items()
    )
    while fields_queue:
        key, value, parent = fields_queue.popleft()
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue
        fields_queue.extend(
            (sub_key, sub_value, field)
            for sub_key, sub_value in value.items()
        )
    return query
def representations_graphql_query(fields):
    """Build GraphQl query to fetch representations of a project.

    Expects 'projectName' variable and optionally representation filters
    ('representationIds', 'representationNames', 'versionIds') to be filled
    before use.

    Args:
        fields (Iterable[str]): Dot-separated field names to query.

    Returns:
        GraphQlQuery: Prepared representations query (paginated).
    """
    query = GraphQlQuery("RepresentationsQuery")
    project_name_var = query.add_variable("projectName", "String!")
    repre_ids_var = query.add_variable("representationIds", "[String!]")
    repre_names_var = query.add_variable("representationNames", "[String!]")
    version_ids_var = query.add_variable("versionIds", "[String!]")

    project_field = query.add_field("project")
    project_field.set_filter("name", project_name_var)

    repres_field = project_field.add_field("representations", has_edges=True)
    repres_field.set_filter("ids", repre_ids_var)
    repres_field.set_filter("versionIds", version_ids_var)
    repres_field.set_filter("names", repre_names_var)

    # Breadth-first expansion of requested (deduplicated) fields
    fields_queue = collections.deque(
        (key, value, repres_field)
        for key, value in fields_to_dict(set(fields)).items()
    )
    while fields_queue:
        key, value, parent = fields_queue.popleft()
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue
        fields_queue.extend(
            (sub_key, sub_value, field)
            for sub_key, sub_value in value.items()
        )
    return query
def representations_parents_qraphql_query(
    version_fields, subset_fields, folder_fields
):
    """Build GraphQl query to fetch parent chain of representations.

    Queries representation -> version -> subset -> folder hierarchy with
    requested fields on each level. Expects 'projectName' and
    'representationIds' variables to be filled before use.

    NOTE: The 'qraphql' typo in the function name is kept because the name
        is part of the public interface.

    Args:
        version_fields (Iterable[str]): Fields queried on version level.
        subset_fields (Iterable[str]): Fields queried on subset level.
        folder_fields (Iterable[str]): Fields queried on folder level.

    Returns:
        GraphQlQuery: Prepared parents query (paginated).
    """
    query = GraphQlQuery("RepresentationsParentsQuery")
    project_name_var = query.add_variable("projectName", "String!")
    repre_ids_var = query.add_variable("representationIds", "[String!]")

    project_field = query.add_field("project")
    project_field.set_filter("name", project_name_var)

    repres_field = project_field.add_field("representations", has_edges=True)
    repres_field.add_field("id")
    repres_field.set_filter("ids", repre_ids_var)

    version_field = repres_field.add_field("version")
    subset_field = version_field.add_field("subset")
    folder_field = subset_field.add_field("folder")

    # Queue requested fields for each hierarchy level in original order
    fields_queue = collections.deque()
    for entity_field, entity_fields in (
        (version_field, version_fields),
        (subset_field, subset_fields),
        (folder_field, folder_fields),
    ):
        for key, value in fields_to_dict(entity_fields).items():
            fields_queue.append((key, value, entity_field))

    while fields_queue:
        key, value, parent = fields_queue.popleft()
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue
        fields_queue.extend(
            (sub_key, sub_value, field)
            for sub_key, sub_value in value.items()
        )
    return query
def workfiles_info_graphql_query(fields):
    """Build GraphQl query to fetch workfile information of a project.

    Expects 'projectName' variable and optionally workfile filters
    ('workfileIds', 'taskIds', 'paths') to be filled before use.

    Args:
        fields (Iterable[str]): Dot-separated field names to query.

    Returns:
        GraphQlQuery: Prepared workfiles query (paginated).
    """
    query = GraphQlQuery("WorkfilesInfo")
    project_name_var = query.add_variable("projectName", "String!")
    workfiles_info_ids = query.add_variable("workfileIds", "[String!]")
    task_ids_var = query.add_variable("taskIds", "[String!]")
    paths_var = query.add_variable("paths", "[String!]")

    project_field = query.add_field("project")
    project_field.set_filter("name", project_name_var)

    workfiles_field = project_field.add_field("workfiles", has_edges=True)
    workfiles_field.set_filter("ids", workfiles_info_ids)
    workfiles_field.set_filter("taskIds", task_ids_var)
    workfiles_field.set_filter("paths", paths_var)

    # Breadth-first expansion of requested (deduplicated) fields
    fields_queue = collections.deque(
        (key, value, workfiles_field)
        for key, value in fields_to_dict(set(fields)).items()
    )
    while fields_queue:
        key, value, parent = fields_queue.popleft()
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue
        fields_queue.extend(
            (sub_key, sub_value, field)
            for sub_key, sub_value in value.items()
        )
    return query
def events_graphql_query(fields):
    """Build GraphQl query to fetch events.

    Expects event filters ('eventTopics', 'projectNames', 'eventStates',
    'eventUsers', 'includeLogsFilter') to be filled as needed before use.

    Args:
        fields (Iterable[str]): Dot-separated field names to query.

    Returns:
        GraphQlQuery: Prepared events query (paginated).
    """
    # Query name fixed: was "WorkfilesInfo", copy-pasted from
    # 'workfiles_info_graphql_query'
    query = GraphQlQuery("EventsQuery")
    topics_var = query.add_variable("eventTopics", "[String!]")
    projects_var = query.add_variable("projectNames", "[String!]")
    states_var = query.add_variable("eventStates", "[String!]")
    users_var = query.add_variable("eventUsers", "[String!]")
    include_logs_var = query.add_variable("includeLogsFilter", "Boolean!")

    events_field = query.add_field("events", has_edges=True)
    events_field.set_filter("topics", topics_var)
    events_field.set_filter("projects", projects_var)
    events_field.set_filter("states", states_var)
    events_field.set_filter("users", users_var)
    events_field.set_filter("includeLogs", include_logs_var)

    # Breadth-first expansion of requested (deduplicated) fields
    nested_fields = fields_to_dict(set(fields))
    query_queue = collections.deque()
    for key, value in nested_fields.items():
        query_queue.append((key, value, events_field))
    while query_queue:
        item = query_queue.popleft()
        key, value, parent = item
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue
        for k, v in value.items():
            query_queue.append((k, v, field))
    return query

View file

@ -0,0 +1,688 @@
import copy
import collections
import uuid
from abc import ABCMeta, abstractproperty
import six
from ._api import get_server_api_connection
from .utils import create_entity_id, REMOVED_VALUE
def _create_or_convert_to_id(entity_id=None):
if entity_id is None:
return create_entity_id()
# Validate if can be converted to uuid
uuid.UUID(entity_id)
return entity_id
def new_folder_entity(
    name,
    folder_type,
    parent_id=None,
    attribs=None,
    data=None,
    thumbnail_id=None,
    entity_id=None
):
    """Create skeleton data of folder entity.

    Args:
        name (str): Is considered as unique identifier of folder in project.
        folder_type (str): Type of folder.
        parent_id (Optional[str]): Id of parent folder.
        attribs (Optional[Dict[str, Any]]): Explicitly set attributes
            of folder.
        data (Optional[Dict[str, Any]]): Custom folder data. Empty dictionary
            is used if not passed.
        thumbnail_id (Optional[str]): Id of thumbnail related to folder.
        entity_id (Optional[str]): Predefined id of entity. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of folder entity.
    """
    if attribs is None:
        attribs = {}
    if data is None:
        data = {}
    if parent_id is not None:
        parent_id = _create_or_convert_to_id(parent_id)
    return {
        "id": _create_or_convert_to_id(entity_id),
        "name": name,
        # NOTE(review): original comment said "This will be ignored" -
        #   presumably 'folderType' is ignored by some server operations;
        #   confirm against server behavior
        "folderType": folder_type,
        "parentId": parent_id,
        "data": data,
        "attrib": attribs,
        "thumbnailId": thumbnail_id
    }
def new_subset_entity(
    name, family, folder_id, attribs=None, data=None, entity_id=None
):
    """Create skeleton data of subset entity.

    Args:
        name (str): Is considered as unique identifier of subset under folder.
        family (str): Subset's family.
        folder_id (str): Id of parent folder.
        attribs (Optional[Dict[str, Any]]): Explicitly set attributes
            of subset.
        data (Optional[Dict[str, Any]]): Subset entity data. Empty dictionary
            is used if not passed.
        entity_id (Optional[str]): Predefined id of entity. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of subset entity.
    """
    return {
        "id": _create_or_convert_to_id(entity_id),
        "name": name,
        "family": family,
        "attrib": {} if attribs is None else attribs,
        "data": {} if data is None else data,
        "folderId": _create_or_convert_to_id(folder_id)
    }
def new_version_entity(
    version,
    subset_id,
    task_id=None,
    thumbnail_id=None,
    author=None,
    attribs=None,
    data=None,
    entity_id=None
):
    """Create skeleton data of version entity.

    Args:
        version (int): Is considered as unique identifier of version
            under subset.
        subset_id (str): Id of parent subset.
        task_id (Optional[str]): Id of task under which subset was created.
        thumbnail_id (Optional[str]): Thumbnail related to version.
        author (Optional[str]): Name of version author.
        attribs (Optional[Dict[str, Any]]): Explicitly set attributes
            of version.
        data (Optional[Dict[str, Any]]): Version entity custom data.
        entity_id (Optional[str]): Predefined id of entity. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of version entity.
    """
    if attribs is None:
        attribs = {}
    # Removed duplicated 'if data is None' check that was here twice
    if data is None:
        data = {}
    output = {
        "id": _create_or_convert_to_id(entity_id),
        "version": int(version),
        "subsetId": _create_or_convert_to_id(subset_id),
        "attrib": attribs,
        "data": data
    }
    # Optional keys are added only when a truthy value is provided
    if task_id:
        output["taskId"] = task_id
    if thumbnail_id:
        output["thumbnailId"] = thumbnail_id
    if author:
        output["author"] = author
    return output
def new_hero_version_entity(
    version,
    subset_id,
    task_id=None,
    thumbnail_id=None,
    author=None,
    attribs=None,
    data=None,
    entity_id=None
):
    """Create skeleton data of hero version entity.

    Args:
        version (int): Is considered as unique identifier of version
            under subset. Should be same as standard version if there is any.
        subset_id (str): Id of parent subset.
        task_id (Optional[str]): Id of task under which subset was created.
        thumbnail_id (Optional[str]): Thumbnail related to version.
        author (Optional[str]): Name of version author.
        attribs (Optional[Dict[str, Any]]): Explicitly set attributes
            of version.
        data (Optional[Dict[str, Any]]): Version entity data.
        entity_id (Optional[str]): Predefined id of entity. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of hero version entity.
    """
    if attribs is None:
        attribs = {}
    if data is None:
        data = {}
    output = {
        "id": _create_or_convert_to_id(entity_id),
        # Hero versions are stored with negative version number
        "version": -abs(int(version)),
        # Validate subset id - consistent with 'new_version_entity'
        "subsetId": _create_or_convert_to_id(subset_id),
        "attrib": attribs,
        "data": data
    }
    # Optional keys are added only when a truthy value is provided
    if task_id:
        output["taskId"] = task_id
    if thumbnail_id:
        output["thumbnailId"] = thumbnail_id
    if author:
        output["author"] = author
    return output
def new_representation_entity(
    name, version_id, attribs=None, data=None, entity_id=None
):
    """Create skeleton data of representation entity.

    Args:
        name (str): Representation name considered as unique identifier
            of representation under version.
        version_id (str): Id of parent version.
        attribs (Optional[Dict[str, Any]]): Explicitly set attributes
            of representation.
        data (Optional[Dict[str, Any]]): Representation entity data.
        entity_id (Optional[str]): Predefined id of entity. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of representation entity.
    """
    return {
        "id": _create_or_convert_to_id(entity_id),
        "versionId": _create_or_convert_to_id(version_id),
        "name": name,
        "data": {} if data is None else data,
        "attrib": {} if attribs is None else attribs
    }
def new_workfile_info_doc(
    filename, folder_id, task_name, files, data=None, entity_id=None
):
    """Create skeleton data of workfile info entity.

    Workfile entity is at this moment used primarily for artist notes.

    Args:
        filename (str): Filename of workfile.
        folder_id (str): Id of folder under which workfile lives.
        task_name (str): Task under which was workfile created.
        files (List[str]): List of rootless filepaths related to workfile.
        data (Optional[Dict[str, Any]]): Additional metadata.
        entity_id (Optional[str]): Predefined id of entity. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of workfile info entity.
    """
    return {
        "id": _create_or_convert_to_id(entity_id),
        "parent": _create_or_convert_to_id(folder_id),
        "task_name": task_name,
        "filename": filename,
        # Falsy 'data' (None or empty) is replaced with fresh dictionary
        "data": data or {},
        "files": files
    }
@six.add_metaclass(ABCMeta)
class AbstractOperation(object):
    """Base operation class.

    Operation represents a call into database. The call can create, change or
    remove data.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'folder', 'representation' etc.
        session: Session object the operation belongs to (subclasses access
            server connection through 'session.con').
    """
    def __init__(self, project_name, entity_type, session):
        self._project_name = project_name
        self._entity_type = entity_type
        self._session = session
        # Unique identifier of the operation itself
        self._id = str(uuid.uuid4())

    @property
    def project_name(self):
        return self._project_name

    @property
    def id(self):
        """Identifier of operation."""
        return self._id

    @property
    def entity_type(self):
        return self._entity_type

    @abstractproperty
    def operation_name(self):
        """Stringified type of operation."""
        pass

    def to_data(self):
        """Convert operation to data that can be converted to json or others.

        Returns:
            Dict[str, Any]: Description of operation.
        """
        return {
            "id": self._id,
            "entity_type": self.entity_type,
            "project_name": self.project_name,
            "operation": self.operation_name
        }
class CreateOperation(AbstractOperation):
    """Operation to create an entity.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'folder', 'representation' etc.
        data (Dict[str, Any]): Data of entity that will be created.
        session: Session object under which is operation created.
    """
    operation_name = "create"

    def __init__(self, project_name, entity_type, data, session):
        # Work on a deep copy so caller's dictionary is never modified
        if data:
            data = copy.deepcopy(dict(data))
        else:
            data = {}
        # Entity id is always available on create operation
        if "id" not in data:
            data["id"] = create_entity_id()
        self._data = data
        super(CreateOperation, self).__init__(
            project_name, entity_type, session
        )

    def __setitem__(self, key, value):
        self.set_value(key, value)

    def __getitem__(self, key):
        return self._data[key]

    def set_value(self, key, value):
        """Set value under key in new entity data."""
        self._data[key] = value

    def get(self, key, *args, **kwargs):
        """Get value from new entity data with optional default."""
        return self._data.get(key, *args, **kwargs)

    @property
    def con(self):
        return self.session.con

    @property
    def session(self):
        return self._session

    @property
    def entity_id(self):
        return self._data["id"]

    @property
    def data(self):
        return self._data

    def to_data(self):
        """Serialize operation with a copy of entity data."""
        output = super(CreateOperation, self).to_data()
        output["data"] = copy.deepcopy(self._data)
        return output

    def to_server_operation(self):
        """Convert operation to server operation body."""
        return {
            "id": self.id,
            "type": "create",
            "entityType": self.entity_type,
            "entityId": self.entity_id,
            "data": self._data
        }
class UpdateOperation(AbstractOperation):
    """Operation which updates an existing entity.

    Args:
        project_name (str): Project where the entity lives.
        entity_type (str): Type of updated entity,
            e.g. 'folder', 'representation' etc.
        entity_id (str): Identifier of the entity.
        update_data (Dict[str, Any]): Key -> value changes that will be
            set in database. Value set to 'REMOVED_VALUE' means the key
            will be removed. Only first level of dictionary is checked
            (on purpose).
        session: Session object the operation belongs to.
    """

    operation_name = "update"

    def __init__(
        self, project_name, entity_type, entity_id, update_data, session
    ):
        super(UpdateOperation, self).__init__(
            project_name, entity_type, session
        )
        self._entity_id = entity_id
        self._update_data = update_data

    @property
    def entity_id(self):
        """Id of updated entity."""

        return self._entity_id

    @property
    def update_data(self):
        """Changes that will be applied."""

        return self._update_data

    @property
    def session(self):
        return self._session

    @property
    def con(self):
        return self._session.con

    def to_data(self):
        # 'REMOVED_VALUE' is not json serializable -> convert to None
        changes = {
            key: (None if value is REMOVED_VALUE else value)
            for key, value in self._update_data.items()
        }
        output = super(UpdateOperation, self).to_data()
        output.update({
            "entity_id": self._entity_id,
            "changes": changes
        })
        return output

    def to_server_operation(self):
        """Convert to server operation body, or None when there is nothing
        to update."""

        if not self._update_data:
            return None

        update_data = {
            key: (None if value is REMOVED_VALUE else value)
            for key, value in self._update_data.items()
        }
        return {
            "id": self.id,
            "type": "update",
            "entityType": self.entity_type,
            "entityId": self._entity_id,
            "data": update_data
        }
class DeleteOperation(AbstractOperation):
    """Operation which removes an entity.

    Args:
        project_name (str): Project where the entity lives.
        entity_type (str): Type of removed entity,
            e.g. 'folder', 'representation' etc.
        entity_id (str): Id of entity that will be removed.
        session: Session object the operation belongs to.
    """

    operation_name = "delete"

    def __init__(self, project_name, entity_type, entity_id, session):
        self._entity_id = entity_id
        super(DeleteOperation, self).__init__(
            project_name, entity_type, session
        )

    @property
    def entity_id(self):
        """Id of entity that will be removed."""

        return self._entity_id

    @property
    def session(self):
        return self._session

    @property
    def con(self):
        return self._session.con

    def to_data(self):
        output = super(DeleteOperation, self).to_data()
        output["entity_id"] = self._entity_id
        return output

    def to_server_operation(self):
        """Convert operation into server operation body."""

        return {
            "id": self.id,
            "type": self.operation_name,
            "entityId": self._entity_id,
            "entityType": self.entity_type,
        }
class OperationsSession(object):
    """Session storing operations that should happen in an order.

    At this moment does not handle anything special and can be considered
    a plain list of operations that will happen one after another. If
    creation of the same entity is added multiple times it is not handled
    in any special way and entity values are not validated.

    All operations must be related to a single project.

    Args:
        con: Connection to server. The default connection (from
            'get_server_api_connection') is used when not passed.
    """

    def __init__(self, con=None):
        if con is None:
            con = get_server_api_connection()
        self._con = con
        self._project_cache = {}
        self._operations = []
        # Operations waiting until their parent operation is added
        # (see 'nested_id' in 'create_entity' & co.)
        self._nested_operations = collections.defaultdict(list)

    @property
    def con(self):
        # Server connection used on commit
        return self._con

    def get_project(self, project_name):
        """Get project entity, cached per session.

        Args:
            project_name (str): Name of project.

        Returns:
            Union[Dict[str, Any], None]: Copy of cached project entity.
        """

        if project_name not in self._project_cache:
            self._project_cache[project_name] = self.con.get_project(
                project_name)
        return copy.deepcopy(self._project_cache[project_name])

    def __len__(self):
        return len(self._operations)

    def add(self, operation):
        """Add operation to be processed.

        Args:
            operation (BaseOperation): Operation that should be processed.

        Raises:
            TypeError: When object is not a known operation type.
        """

        if not isinstance(
            operation,
            (CreateOperation, UpdateOperation, DeleteOperation)
        ):
            raise TypeError("Expected Operation object got {}".format(
                str(type(operation))
            ))
        self._operations.append(operation)

    def append(self, operation):
        """Add operation to be processed.

        Args:
            operation (BaseOperation): Operation that should be processed.
        """

        self.add(operation)

    def extend(self, operations):
        """Add operations to be processed.

        Args:
            operations (List[BaseOperation]): Operations that should be
                processed.
        """

        for operation in operations:
            self.add(operation)

    def remove(self, operation):
        """Remove operation."""

        self._operations.remove(operation)

    def clear(self):
        """Clear all registered operations."""

        self._operations = []

    def to_data(self):
        """Serialize all registered operations into plain data."""

        return [
            operation.to_data()
            for operation in self._operations
        ]

    def commit(self):
        """Commit session operations."""

        # Take ownership of the current queue so the session can be reused
        operations, self._operations = self._operations, []
        if not operations:
            return
        # Server operations must be sent per project
        operations_by_project = collections.defaultdict(list)
        for operation in operations:
            operations_by_project[operation.project_name].append(operation)
        for project_name, operations in operations_by_project.items():
            operations_body = []
            for operation in operations:
                body = operation.to_server_operation()
                # Operations without changes produce 'None' -> skip them
                if body is not None:
                    operations_body.append(body)
            self._con.send_batch_operations(
                project_name, operations_body, can_fail=False
            )

    def create_entity(self, project_name, entity_type, data, nested_id=None):
        """Fast access to 'CreateOperation'.

        Args:
            project_name (str): On which project the creation happens.
            entity_type (str): Which entity type will be created.
            data (Dict[str, Any]): Entity data.
            nested_id (str): Id of other operation from which is triggered
                operation -> Operations can trigger suboperations but they
                must be added to operations list after their parent is
                added.

        Returns:
            CreateOperation: Object of create operation.
        """

        operation = CreateOperation(
            project_name, entity_type, data, self
        )
        if nested_id:
            # Postpone until the parent operation is added
            self._nested_operations[nested_id].append(operation)
        else:
            self.add(operation)
            # Flush suboperations waiting for this operation
            if operation.id in self._nested_operations:
                self.extend(self._nested_operations.pop(operation.id))
        return operation

    def update_entity(
        self, project_name, entity_type, entity_id, update_data, nested_id=None
    ):
        """Fast access to 'UpdateOperation'.

        Returns:
            UpdateOperation: Object of update operation.
        """

        operation = UpdateOperation(
            project_name, entity_type, entity_id, update_data, self
        )
        if nested_id:
            self._nested_operations[nested_id].append(operation)
        else:
            self.add(operation)
            if operation.id in self._nested_operations:
                self.extend(self._nested_operations.pop(operation.id))
        return operation

    def delete_entity(
        self, project_name, entity_type, entity_id, nested_id=None
    ):
        """Fast access to 'DeleteOperation'.

        Returns:
            DeleteOperation: Object of delete operation.
        """

        operation = DeleteOperation(
            project_name, entity_type, entity_id, self
        )
        if nested_id:
            self._nested_operations[nested_id].append(operation)
        else:
            self.add(operation)
            if operation.id in self._nested_operations:
                self.extend(self._nested_operations.pop(operation.id))
        return operation

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,219 @@
import os
import time
import collections
import appdirs
# Lightweight record describing a file on disk; used by cache cleanup
FileInfo = collections.namedtuple(
    "FileInfo",
    ("path", "size", "modification_time")
)
class ThumbnailCache:
    """Cache of thumbnails on local storage.

    Thumbnails are cached via appdirs into a predefined directory. Each
    project has its own subfolder with thumbnails -> that's because each
    project has its own thumbnail id validation, and file names are
    thumbnail ids with a matching extension. Extensions are predefined
    (.png and .jpeg).

    Cache has a cleanup mechanism which is triggered on initialization by
    default. The cleanup has 2 levels:
    1. soft cleanup which removes all files that are older than
        'days_alive'
    2. max size cleanup which removes files until the thumbnails folder
        contains less than 'max_filesize'
        - this is time consuming so it's not triggered automatically

    Args:
        cleanup (bool): Trigger soft cleanup (cleanup expired thumbnails).
    """

    # Lifetime of thumbnails (in seconds)
    # - default 3 days
    days_alive = 3 * 24 * 60 * 60
    # Max size of thumbnail directory (in bytes)
    # - default 2 Gb
    max_filesize = 2 * 1024 * 1024 * 1024

    def __init__(self, cleanup=True):
        # Lazily resolved root directory (see 'get_thumbnails_dir')
        self._thumbnails_dir = None
        if cleanup:
            self.cleanup()

    def get_thumbnails_dir(self):
        """Root directory where thumbnails are stored.

        Returns:
            str: Path to thumbnails root.
        """

        if self._thumbnails_dir is None:
            directory = appdirs.user_data_dir("ayon", "ynput")
            self._thumbnails_dir = os.path.join(directory, "thumbnails")
        return self._thumbnails_dir

    thumbnails_dir = property(get_thumbnails_dir)

    def get_thumbnails_dir_file_info(self):
        """Get information about all files in thumbnails directory.

        Returns:
            List[FileInfo]: List of file information about all files.
        """

        thumbnails_dir = self.thumbnails_dir
        files_info = []
        if not os.path.exists(thumbnails_dir):
            return files_info
        for root, _, filenames in os.walk(thumbnails_dir):
            for filename in filenames:
                path = os.path.join(root, filename)
                files_info.append(FileInfo(
                    path, os.path.getsize(path), os.path.getmtime(path)
                ))
        return files_info

    def get_thumbnails_dir_size(self, files_info=None):
        """Get full size of thumbnail directory.

        Args:
            files_info (List[FileInfo]): Prepared file information about
                files in thumbnail directory.

        Returns:
            int: File size of all files in thumbnail directory.
        """

        if files_info is None:
            files_info = self.get_thumbnails_dir_file_info()
        if not files_info:
            return 0
        return sum(
            file_info.size
            for file_info in files_info
        )

    def cleanup(self, check_max_size=False):
        """Cleanup thumbnails directory.

        Args:
            check_max_size (bool): Also cleanup files to match max size of
                thumbnails directory.
        """

        thumbnails_dir = self.get_thumbnails_dir()
        # Skip if thumbnails dir does not exist yet
        if not os.path.exists(thumbnails_dir):
            return
        self._soft_cleanup(thumbnails_dir)
        if check_max_size:
            self._max_size_cleanup(thumbnails_dir)

    def _soft_cleanup(self, thumbnails_dir):
        # Remove all files older than 'days_alive'
        current_time = time.time()
        for root, _, filenames in os.walk(thumbnails_dir):
            for filename in filenames:
                path = os.path.join(root, filename)
                modification_time = os.path.getmtime(path)
                if current_time - modification_time > self.days_alive:
                    os.remove(path)

    def _max_size_cleanup(self, thumbnails_dir):
        # Remove oldest files until directory size is under 'max_filesize'
        files_info = self.get_thumbnails_dir_file_info()
        size = self.get_thumbnails_dir_size(files_info)
        if size < self.max_filesize:
            return
        # Oldest files first
        sorted_file_info = collections.deque(
            sorted(files_info, key=lambda item: item.modification_time)
        )
        diff = size - self.max_filesize
        while diff > 0:
            if not sorted_file_info:
                break
            file_info = sorted_file_info.popleft()
            diff -= file_info.size
            os.remove(file_info.path)

    def get_thumbnail_filepath(self, project_name, thumbnail_id):
        """Get thumbnail by thumbnail id.

        Args:
            project_name (str): Name of project.
            thumbnail_id (str): Thumbnail id.

        Returns:
            Union[str, None]: Path to thumbnail image or None if thumbnail
                is not cached yet.
        """

        if not thumbnail_id:
            return None
        # Extension is unknown -> try all supported extensions
        for ext in (
            ".png",
            ".jpeg",
        ):
            filepath = os.path.join(
                self.thumbnails_dir, project_name, thumbnail_id + ext
            )
            if os.path.exists(filepath):
                return filepath
        return None

    def get_project_dir(self, project_name):
        """Path to root directory for specific project.

        Args:
            project_name (str): Name of project for which root directory
                path should be returned.

        Returns:
            str: Path to root of project's thumbnails.
        """

        return os.path.join(self.thumbnails_dir, project_name)

    def make_sure_project_dir_exists(self, project_name):
        """Create project thumbnails directory when missing.

        Returns:
            str: Path to project's thumbnails directory.
        """

        project_dir = self.get_project_dir(project_name)
        if not os.path.exists(project_dir):
            os.makedirs(project_dir)
        return project_dir

    def store_thumbnail(self, project_name, thumbnail_id, content, mime_type):
        """Store thumbnail to cache folder.

        Args:
            project_name (str): Project where the thumbnail belongs to.
            thumbnail_id (str): Id of thumbnail.
            content (bytes): Byte content of thumbnail file.
            mime_type (str): Type of content ('image/png'
                or 'image/jpeg').

        Returns:
            str: Path to cached thumbnail image file.

        Raises:
            ValueError: On unsupported mime type.
        """

        if mime_type == "image/png":
            ext = ".png"
        elif mime_type == "image/jpeg":
            ext = ".jpeg"
        else:
            raise ValueError(
                "Unknown mime type for thumbnail \"{}\"".format(mime_type))
        project_dir = self.make_sure_project_dir_exists(project_name)
        thumbnail_path = os.path.join(project_dir, thumbnail_id + ext)
        with open(thumbnail_path, "wb") as stream:
            stream.write(content)
        # Refresh mtime so cleanup treats the file as fresh
        current_time = time.time()
        os.utime(thumbnail_path, (current_time, current_time))
        return thumbnail_path

View file

@ -0,0 +1,451 @@
import re
import datetime
import uuid
import string
import collections
try:
# Python 3
from urllib.parse import urlparse, urlencode
except ImportError:
# Python 2
from urlparse import urlparse
from urllib import urlencode
import requests
import unidecode
from .exceptions import UrlError
# Sentinel marking a key that should be removed on entity update
REMOVED_VALUE = object()

# Characters kept by 'slugify_string'
SLUGIFY_WHITELIST = string.ascii_letters + string.digits
# Characters 'slugify_string' splits words on
SLUGIFY_SEP_WHITELIST = " ,./\\;:!|*^#@~+-_="

# Parent entities of a representation, from closest parent to project
RepresentationParents = collections.namedtuple(
    "RepresentationParents",
    ("version", "subset", "folder", "project")
)
def prepare_query_string(key_values):
    """Convert mapping of values into a url query string.

    The query starts with '?' when there is at least one value, otherwise
    an empty string is returned.

    Args:
        key_values (dict[str, Any]): Query values.

    Returns:
        str: Query string.
    """

    if key_values:
        return "?" + urlencode(key_values)
    return ""
def create_entity_id():
    """Create a new entity id.

    Returns:
        str: New uuid1 as 32 character hex string.
    """

    new_id = uuid.uuid1()
    return new_id.hex
def convert_entity_id(entity_id):
    """Convert a value into a 32 character hex entity id.

    Args:
        entity_id (Union[str, uuid.UUID, None]): Value to convert.

    Returns:
        Union[str, None]: Hex version of the id, or None when the value
            cannot be converted.
    """

    if not entity_id:
        return None

    if not isinstance(entity_id, uuid.UUID):
        try:
            entity_id = uuid.UUID(entity_id)
        except (TypeError, ValueError, AttributeError):
            return None
    return entity_id.hex
def convert_or_create_entity_id(entity_id=None):
    """Return a valid entity id, creating a new one when needed.

    Args:
        entity_id (Union[str, uuid.UUID, None]): Value to convert.

    Returns:
        str: Converted id, or a freshly created one when conversion fails.
    """

    converted_id = convert_entity_id(entity_id)
    if converted_id is not None:
        return converted_id
    return create_entity_id()
def entity_data_json_default(value):
    """JSON serializer fallback for entity data values.

    Datetime objects are converted to integer timestamps; any other value
    is rejected the same way 'json.dumps' rejects it by default.

    Raises:
        TypeError: When the value is not serializable.
    """

    if not isinstance(value, datetime.datetime):
        raise TypeError(
            "Object of type {} is not JSON serializable".format(
                str(type(value))
            )
        )
    return int(value.timestamp())
def slugify_string(
    input_string,
    separator="_",
    slug_whitelist=SLUGIFY_WHITELIST,
    split_chars=SLUGIFY_SEP_WHITELIST,
    min_length=1,
    lower=False,
    make_set=False,
):
    """Slugify a text string.

    Transliterates the input string to ASCII, splits it into words, drops
    characters outside the whitelist and joins the resulting words with
    the given separator.

    Args:
        input_string (str): Input string to slugify
        separator (str): A string used to separate returned elements
            (default: "_")
        slug_whitelist (str): Characters allowed in the output
            (default: ascii letters, digits and the separator)
        split_chars (str): Set of characters used for word splitting
            (there is a sane default)
        min_length (int): Minimal length of an element (word).
        lower (bool): Convert to lower-case (default: False)
        make_set (bool): Return "set" object instead of string.

    Returns:
        Union[str, Set[str]]: Based on 'make_set' value returns slugified
            string or set of slugified words.
    """

    text = unidecode.unidecode(input_string)
    if lower:
        text = text.lower()

    # Drop everything outside the whitelist from each word
    keep_regex = re.compile("[^{}]".format(re.escape(slug_whitelist)))
    words = []
    for word in re.split("[{}]".format(re.escape(split_chars)), text):
        cleaned = keep_regex.sub("", word)
        # Skip words that are too short
        if len(cleaned) >= min_length:
            words.append(cleaned)

    if make_set:
        return set(words)
    return separator.join(words)
def failed_json_default(value):
    """JSON serializer fallback which describes unserializable values."""

    value_type = type(value)
    return "< Failed value {} > {}".format(value_type, str(value))
def prepare_attribute_changes(old_entity, new_entity, replace=False):
    """Prepare changes of 'attrib' between two entity data objects.

    Args:
        old_entity (Dict[str, Any]): Entity data with current values.
        new_entity (Dict[str, Any]): Entity data with new values.
        replace (bool): Attributes missing from the new entity are marked
            with 'REMOVED_VALUE'.

    Returns:
        Dict[str, Any]: Attribute changes.
    """

    new_attrib = new_entity.get("attrib")
    old_attrib = old_entity.get("attrib")
    if new_attrib is None:
        if not replace:
            return {}
        new_attrib = {}

    # Nothing to compare against -> everything is new
    if old_attrib is None:
        return new_attrib

    changes = {
        attr: value
        for attr, value in new_attrib.items()
        if old_attrib.get(attr) != value
    }
    if replace:
        for attr in old_attrib:
            if attr not in new_attrib:
                changes[attr] = REMOVED_VALUE
    return changes
def prepare_entity_changes(old_entity, new_entity, replace=False):
    """Prepare changes between two entity data objects.

    Args:
        old_entity (Dict[str, Any]): Entity data with current values.
        new_entity (Dict[str, Any]): Entity data with new values.
        replace (bool): Keys missing from the new entity are marked with
            'REMOVED_VALUE'.

    Returns:
        Dict[str, Any]: Changes between the entities. Attribute changes
            are nested under "attrib" key.
    """

    # 'attrib' is handled separately below
    changes = {
        key: value
        for key, value in new_entity.items()
        if key != "attrib" and old_entity.get(key) != value
    }
    if replace:
        for key in old_entity:
            if key not in new_entity:
                changes[key] = REMOVED_VALUE

    attr_changes = prepare_attribute_changes(old_entity, new_entity, replace)
    if attr_changes:
        changes["attrib"] = attr_changes
    return changes
def _try_parse_url(url):
try:
return urlparse(url)
except BaseException:
return None
def _try_connect_to_server(url):
    """Check if a GET request to the url succeeds."""

    # TODO add validation that the url leads to an AYON server
    # - this won't reject urls that lead to e.g. 'google.com'
    success = True
    try:
        requests.get(url)
    except BaseException:
        success = False
    return success
def login_to_server(url, username, password):
    """Use login to the server to receive token.

    Args:
        url (str): Server url.
        username (str): User's username.
        password (str): User's password.

    Returns:
        Union[str, None]: User's token if login was successful.
            Otherwise 'None'.
    """

    response = requests.post(
        "{}/api/auth/login".format(url),
        headers={"Content-Type": "application/json"},
        json={
            "name": username,
            "password": password
        }
    )
    # 200 - success
    # 401 - invalid credentials
    # *   - other issues
    if response.status_code != 200:
        return None
    return response.json()["token"]
def logout_from_server(url, token):
    """Logout from server and throw token away.

    Args:
        url (str): Url from which should be logged out.
        token (str): Token which should be used to log out.
    """

    requests.post(
        url + "/api/auth/logout",
        headers={
            "Content-Type": "application/json",
            "Authorization": "Bearer {}".format(token)
        }
    )
def is_token_valid(url, token):
    """Check if token is valid on the server.

    Args:
        url (str): Server url.
        token (str): User's token.

    Returns:
        bool: True if token is valid.
    """

    response = requests.get(
        "{}/api/users/me".format(url),
        headers={
            "Content-Type": "application/json",
            "Authorization": "Bearer {}".format(token)
        }
    )
    return response.status_code == 200
def validate_url(url):
    """Validate url if is valid and server is available.

    Validation checks if the value can be parsed as url and contains
    a scheme. Function will try to autofix the url, thus may return a
    modified url when connection to the server works.

    ```python
    my_url = "my.server.url"
    try:
        # Store new url
        validated_url = validate_url(my_url)

    except UrlError:
        # Handle invalid url
        ...
    ```

    Args:
        url (str): Server url.

    Returns:
        str: Url which was used to connect to server.

    Raises:
        UrlError: Error with short description and hints for user.
    """

    stripped_url = url.strip()
    if not stripped_url:
        raise UrlError(
            "Invalid url format. Url is empty.",
            title="Invalid url format",
            hints=["url seems to be empty"]
        )

    # Not sure if this is good idea?
    modified_url = stripped_url.rstrip("/")
    parsed_url = _try_parse_url(modified_url)
    universal_hints = [
        "does the url work in browser?"
    ]
    if parsed_url is None:
        raise UrlError(
            "Invalid url format. Url cannot be parsed as url \"{}\".".format(
                modified_url
            ),
            title="Invalid url format",
            hints=universal_hints
        )

    # Try add 'https://' scheme if is missing
    # - this will trigger UrlError if both will crash
    if not parsed_url.scheme:
        new_url = "https://" + modified_url
        if _try_connect_to_server(new_url):
            return new_url

    if _try_connect_to_server(modified_url):
        return modified_url

    hints = []
    if "/" in parsed_url.path or not parsed_url.scheme:
        if parsed_url.scheme:
            # Suggest the url without the path part
            # (fix: previously scheme and path were joined without "://",
            # and for scheme-ful urls the path split yielded an empty
            # string -> broken suggestion)
            suggestion = "{}://{}".format(
                parsed_url.scheme, parsed_url.netloc
            )
        else:
            # Without scheme the whole url is stored in 'path'
            suggestion = "https://" + parsed_url.path.split("/")[0]
        hints.append(
            "did you mean \"{}\"?".format(suggestion)
        )

    raise UrlError(
        "Couldn't connect to server on \"{}\"".format(url),
        title="Couldn't connect to server",
        hints=hints + universal_hints
    )
class TransferProgress:
    """Object to store progress of download/upload from/to server.

    Progress has three phases: not started, running and finished (either
    done or failed). Content size is optional as it may not be known
    up front.
    """

    def __init__(self):
        self._started = False
        self._transfer_done = False
        self._transfered = 0
        self._content_size = None

        self._failed = False
        self._fail_reason = None

        self._source_url = "N/A"
        self._destination_url = "N/A"

    def get_content_size(self):
        """Full size of transferred content in bytes.

        Returns:
            Union[int, None]: Content size, or None when unknown.
        """

        return self._content_size

    def set_content_size(self, content_size):
        """Set content size. Can be set only once.

        Raises:
            ValueError: When content size was already set.
        """

        if self._content_size is not None:
            raise ValueError("Content size was set more then once")
        self._content_size = content_size

    def get_started(self):
        """bool: Transfer was started."""

        return self._started

    def set_started(self):
        """Mark progress as started. Can be called only once.

        Raises:
            ValueError: When progress was already started.
        """

        if self._started:
            raise ValueError("Progress already started")
        self._started = True

    def get_transfer_done(self):
        """bool: Transfer finished successfully."""

        return self._transfer_done

    def set_transfer_done(self):
        """Mark transfer as done.

        Raises:
            ValueError: When already done or when not started yet.
        """

        if self._transfer_done:
            raise ValueError("Progress was already marked as done")
        if not self._started:
            raise ValueError("Progress didn't start yet")
        self._transfer_done = True

    def get_failed(self):
        """bool: Transfer failed."""

        return self._failed

    def get_fail_reason(self):
        """Union[str, None]: Reason of failure, or None."""

        return self._fail_reason

    def set_failed(self, reason):
        """Mark progress as failed with a reason."""

        self._fail_reason = reason
        self._failed = True

    def get_transferred_size(self):
        """int: Amount of bytes transferred so far."""

        return self._transfered

    def set_transferred_size(self, transfered):
        self._transfered = transfered

    def add_transferred_chunk(self, chunk_size):
        """Add transferred chunk size to total transferred size."""

        self._transfered += chunk_size

    def get_source_url(self):
        return self._source_url

    def set_source_url(self, url):
        self._source_url = url

    def get_destination_url(self):
        return self._destination_url

    def set_destination_url(self, url):
        self._destination_url = url

    @property
    def is_running(self):
        """bool: Transfer started and is not finished yet."""

        if (
            not self.started
            # Bug fix: this read nonexistent 'self.done' (the property is
            # named 'transfer_done') and raised AttributeError once the
            # transfer was started
            or self.transfer_done
            or self.failed
        ):
            return False
        return True

    @property
    def transfer_progress(self):
        """Progress of transfer in percents.

        Returns:
            Union[float, None]: Percentage, or None when content size
                is unknown.
        """

        if self._content_size is None:
            return None
        return (self._transfered * 100.0) / float(self._content_size)

    # Property aliases of getters/setters
    # (duplicate 'content_size' definition was removed)
    content_size = property(get_content_size, set_content_size)
    started = property(get_started)
    transfer_done = property(get_transfer_done)
    failed = property(get_failed)
    fail_reason = property(get_fail_reason)
    source_url = property(get_source_url, set_source_url)
    destination_url = property(get_destination_url, set_destination_url)
    transferred_size = property(get_transferred_size, set_transferred_size)

View file

@ -0,0 +1,2 @@
"""Package declaring Python API for Ayon server."""
__version__ = "0.1.16"

18
poetry.lock generated
View file

@ -302,24 +302,6 @@ files = [
pycodestyle = ">=2.10.0"
tomli = {version = "*", markers = "python_version < \"3.11\""}
[[package]]
name = "ayon-python-api"
version = "0.1.16"
description = "AYON Python API"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "ayon-python-api-0.1.16.tar.gz", hash = "sha256:666110954dd75b2be1699a29b4732cfb0bcb09d01f64fba4449bfc8ac1fb43f1"},
{file = "ayon_python_api-0.1.16-py3-none-any.whl", hash = "sha256:bbcd6df1f80ddf32e653a1bb31289cb5fd1a8bea36ab4c8e6aef08c41b6393de"},
]
[package.dependencies]
appdirs = ">=1,<2"
requests = ">=2.27.1"
six = ">=1.15"
Unidecode = ">=1.2.0"
[[package]]
name = "babel"
version = "2.11.0"

View file

@ -70,7 +70,6 @@ requests = "^2.25.1"
pysftp = "^0.2.9"
dropbox = "^11.20.0"
aiohttp-middlewares = "^2.0.0"
ayon-python-api = "^0.1"
opencolorio = "^2.2.0"
Unidecode = "^1.2"