diff --git a/ayon_start.py b/ayon_start.py
new file mode 100644
index 0000000000..11677b4415
--- /dev/null
+++ b/ayon_start.py
@@ -0,0 +1,376 @@
+# -*- coding: utf-8 -*-
+"""Main entry point for AYON command.
+
+Bootstrapping process of AYON.
+"""
+import os
+import sys
+import site
+import traceback
+import contextlib
+
+# Enable logging debug mode when "--verbose" is passed
+if "--verbose" in sys.argv:
+    expected_values = (
+        "Expected: notset, debug, info, warning, error, critical"
+        " or integer [0-50]."
+    )
+    idx = sys.argv.index("--verbose")
+    sys.argv.pop(idx)
+    if idx < len(sys.argv):
+        value = sys.argv.pop(idx)
+    else:
+        raise RuntimeError((
+            f"Expected value after \"--verbose\" argument. {expected_values}"
+        ))
+
+    log_level = None
+    low_value = value.lower()
+    if low_value.isdigit():
+        log_level = int(low_value)
+    elif low_value == "notset":
+        log_level = 0
+    elif low_value == "debug":
+        log_level = 10
+    elif low_value == "info":
+        log_level = 20
+    elif low_value == "warning":
+        log_level = 30
+    elif low_value == "error":
+        log_level = 40
+    elif low_value == "critical":
+        log_level = 50
+
+    if log_level is None:
+        raise ValueError((
+            "Unexpected value after \"--verbose\" "
+            f"argument \"{value}\". {expected_values}"
+        ))
+
+    os.environ["OPENPYPE_LOG_LEVEL"] = str(log_level)
+
+# Enable debug mode, may affect log level if log level is not defined
+if "--debug" in sys.argv:
+    sys.argv.remove("--debug")
+    os.environ["OPENPYPE_DEBUG"] = "1"
+
+if "--automatic-tests" in sys.argv:
+    sys.argv.remove("--automatic-tests")
+    os.environ["IS_TEST"] = "1"
+
+if "--use-staging" in sys.argv:
+    sys.argv.remove("--use-staging")
+    os.environ["OPENPYPE_USE_STAGING"] = "1"
+
+_silent_commands = {
+    "run",
+    "standalonepublisher",
+    "extractenvironments",
+    "version"
+}
+if "--headless" in sys.argv:
+    os.environ["OPENPYPE_HEADLESS_MODE"] = "1"
+    sys.argv.remove("--headless")
+elif os.getenv("OPENPYPE_HEADLESS_MODE") != "1":
+    os.environ.pop("OPENPYPE_HEADLESS_MODE", None)
+
+
+IS_BUILT_APPLICATION = getattr(sys, "frozen", False)
+HEADLESS_MODE_ENABLED = os.environ.get("OPENPYPE_HEADLESS_MODE") == "1"
+SILENT_MODE_ENABLED = any(arg in _silent_commands for arg in sys.argv)
+
+_pythonpath = os.getenv("PYTHONPATH", "")
+_python_paths = _pythonpath.split(os.pathsep)
+if not IS_BUILT_APPLICATION:
+    # Code root defined by `ayon_start.py` directory
+    AYON_ROOT = os.path.dirname(os.path.abspath(__file__))
+    _dependencies_path = site.getsitepackages()[-1]
+else:
+    AYON_ROOT = os.path.dirname(sys.executable)
+
+    # Add dependencies folder to 'sys.path' for frozen code
+    _dependencies_path = os.path.normpath(
+        os.path.join(AYON_ROOT, "dependencies")
+    )
+# Add content of `/dependencies` to 'sys.path' and PYTHONPATH.
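+# (The path is appended both to 'sys.path' of this process and to the
+# PYTHONPATH environment variable so spawned subprocesses see it too.)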
+sys.path.append(_dependencies_path) +_python_paths.append(_dependencies_path) + +# Vendored python modules that must not be in PYTHONPATH environment but +# are required for OpenPype processes +sys.path.insert(0, os.path.join(AYON_ROOT, "vendor", "python")) + +# Add common package to sys path +# - common contains common code for bootstraping and OpenPype processes +sys.path.insert(0, os.path.join(AYON_ROOT, "common")) + +# This is content of 'core' addon which is ATM part of build +common_python_vendor = os.path.join( + AYON_ROOT, + "openpype", + "vendor", + "python", + "common" +) +# Add tools dir to sys path for pyblish UI discovery +tools_dir = os.path.join(AYON_ROOT, "openpype", "tools") +for path in (AYON_ROOT, common_python_vendor, tools_dir): + while path in _python_paths: + _python_paths.remove(path) + + while path in sys.path: + sys.path.remove(path) + + _python_paths.insert(0, path) + sys.path.insert(0, path) + +os.environ["PYTHONPATH"] = os.pathsep.join(_python_paths) + +# enabled AYON state +os.environ["USE_AYON_SERVER"] = "1" +# Set this to point either to `python` from venv in case of live code +# or to `ayon` or `ayon_console` in case of frozen code +os.environ["OPENPYPE_EXECUTABLE"] = sys.executable +os.environ["AYON_ROOT"] = AYON_ROOT +os.environ["OPENPYPE_ROOT"] = AYON_ROOT +os.environ["OPENPYPE_REPOS_ROOT"] = AYON_ROOT +os.environ["AVALON_LABEL"] = "AYON" +# Set name of pyblish UI import +os.environ["PYBLISH_GUI"] = "pyblish_pype" + +import blessed # noqa: E402 +import certifi # noqa: E402 + + +if sys.__stdout__: + term = blessed.Terminal() + + def _print(message: str): + if SILENT_MODE_ENABLED: + pass + elif message.startswith("!!! "): + print(f'{term.orangered2("!!! ")}{message[4:]}') + elif message.startswith(">>> "): + print(f'{term.aquamarine3(">>> ")}{message[4:]}') + elif message.startswith("--- "): + print(f'{term.darkolivegreen3("--- ")}{message[4:]}') + elif message.startswith("*** "): + print(f'{term.gold("*** ")}{message[4:]}') + elif message.startswith(" - "): + print(f'{term.wheat(" - ")}{message[4:]}') + elif message.startswith(" . "): + print(f'{term.tan(" . ")}{message[4:]}') + elif message.startswith(" - "): + print(f'{term.seagreen3(" - ")}{message[7:]}') + elif message.startswith(" ! "): + print(f'{term.goldenrod(" ! ")}{message[7:]}') + elif message.startswith(" * "): + print(f'{term.aquamarine1(" * ")}{message[7:]}') + elif message.startswith(" "): + print(f'{term.darkseagreen3(" ")}{message[4:]}') + else: + print(message) +else: + def _print(message: str): + if not SILENT_MODE_ENABLED: + print(message) + + +# if SSL_CERT_FILE is not set prior to OpenPype launch, we set it to point +# to certifi bundle to make sure we have reasonably new CA certificates. +if not os.getenv("SSL_CERT_FILE"): + os.environ["SSL_CERT_FILE"] = certifi.where() +elif os.getenv("SSL_CERT_FILE") != certifi.where(): + _print("--- your system is set to use custom CA certificate bundle.") + +from ayon_common.connection.credentials import ( + ask_to_login_ui, + add_server, + need_server_or_login, + load_environments, + set_environments, + create_global_connection, + confirm_server_login, +) +from ayon_common.distribution.addon_distribution import AyonDistribution + + +def set_global_environments() -> None: + """Set global OpenPype's environments.""" + import acre + + from openpype.settings import get_general_environments + + general_env = get_general_environments() + + # first resolve general environment because merge doesn't expect + # values to be list. 
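+    # (e.g. a platform-keyed value parsed by 'acre' can be a list such as
+    #  {"PATH": ["/a", "/b"]} and must be computed to a single string first)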
+ # TODO: switch to OpenPype environment functions + merged_env = acre.merge( + acre.compute(acre.parse(general_env), cleanup=False), + dict(os.environ) + ) + env = acre.compute( + merged_env, + cleanup=False + ) + os.environ.clear() + os.environ.update(env) + + # Hardcoded default values + os.environ["PYBLISH_GUI"] = "pyblish_pype" + # Change scale factor only if is not set + if "QT_AUTO_SCREEN_SCALE_FACTOR" not in os.environ: + os.environ["QT_AUTO_SCREEN_SCALE_FACTOR"] = "1" + + +def set_addons_environments(): + """Set global environments for OpenPype modules. + + This requires to have OpenPype in `sys.path`. + """ + + import acre + from openpype.modules import ModulesManager + + modules_manager = ModulesManager() + + # Merge environments with current environments and update values + if module_envs := modules_manager.collect_global_environments(): + parsed_envs = acre.parse(module_envs) + env = acre.merge(parsed_envs, dict(os.environ)) + os.environ.clear() + os.environ.update(env) + + +def _connect_to_ayon_server(): + load_environments() + if not need_server_or_login(): + create_global_connection() + return + + if HEADLESS_MODE_ENABLED: + _print("!!! Cannot open v4 Login dialog in headless mode.") + _print(( + "!!! Please use `AYON_SERVER_URL` to specify server address" + " and 'AYON_TOKEN' to specify user's token." + )) + sys.exit(1) + + current_url = os.environ.get("AYON_SERVER_URL") + url, token, username = ask_to_login_ui(current_url, always_on_top=True) + if url is not None and token is not None: + confirm_server_login(url, token, username) + return + + if url is not None: + add_server(url, username) + + _print("!!! Login was not successful.") + sys.exit(0) + + +def _check_and_update_from_ayon_server(): + """Gets addon info from v4, compares with local folder and updates it. 
+
+    Raises:
+        RuntimeError: When the distribution process fails.
+    """
+
+    distribution = AyonDistribution()
+    distribution.distribute()
+    distribution.validate_distribution()
+
+    python_paths = [
+        path
+        for path in os.getenv("PYTHONPATH", "").split(os.pathsep)
+        if path
+    ]
+
+    for path in distribution.get_sys_paths():
+        sys.path.insert(0, path)
+        if path not in python_paths:
+            python_paths.append(path)
+    os.environ["PYTHONPATH"] = os.pathsep.join(python_paths)
+
+
+def boot():
+    """Bootstrap OpenPype."""
+
+    from openpype.version import __version__
+
+    # TODO load version
+    os.environ["OPENPYPE_VERSION"] = __version__
+    os.environ["AYON_VERSION"] = __version__
+
+    use_staging = os.environ.get("OPENPYPE_USE_STAGING") == "1"
+
+    _connect_to_ayon_server()
+    _check_and_update_from_ayon_server()
+
+    # Delete OpenPype module and its submodules from cache so it is used
+    # from the specific version
+    modules_to_del = [
+        module_name
+        for module_name in tuple(sys.modules)
+        if module_name == "openpype" or module_name.startswith("openpype.")
+    ]
+
+    for module_name in modules_to_del:
+        with contextlib.suppress(AttributeError, KeyError):
+            del sys.modules[module_name]
+
+    from openpype import cli
+    from openpype.lib import terminal as t
+
+    _print(">>> loading environments ...")
+    _print("  - global AYON ...")
+    set_global_environments()
+    _print("  - for addons ...")
+    set_addons_environments()
+
+    # Print info when not running scripts defined in 'silent commands'
+    if not SILENT_MODE_ENABLED:
+        info = get_info(use_staging)
+        info.insert(0, f">>> Using AYON from [ {AYON_ROOT} ]")
+
+        t_width = 20
+        with contextlib.suppress(ValueError, OSError):
+            t_width = os.get_terminal_size().columns - 2
+
+        _header = f"*** AYON [{__version__}] "
+        info.insert(0, _header + "-" * (t_width - len(_header)))
+
+        for i in info:
+            t.echo(i)
+
+    try:
+        cli.main(obj={}, prog_name="openpype")
+    except Exception:  # noqa
+        exc_info = sys.exc_info()
+        _print("!!! OpenPype crashed:")
+        traceback.print_exception(*exc_info)
+        sys.exit(1)
+
+
+def get_info(use_staging=None) -> list:
+    """Collect formatted information lines for the console."""
+
+    inf = []
+    if use_staging:
+        inf.append(("AYON variant", "staging"))
+    else:
+        inf.append(("AYON variant", "production"))
+
+    # NOTE add addons information
+
+    maximum = max(len(i[0]) for i in inf)
+    formatted = []
+    for info in inf:
+        padding = (maximum - len(info[0])) + 1
+        formatted.append(f'... {info[0]}:{" " * padding}[ {info[1]} ]')
+    return formatted
+
+
+if __name__ == "__main__":
+    boot()
diff --git a/common/openpype_common/distribution/__init__.py b/common/ayon_common/connection/__init__.py
similarity index 100%
rename from common/openpype_common/distribution/__init__.py
rename to common/ayon_common/connection/__init__.py
diff --git a/common/ayon_common/connection/credentials.py b/common/ayon_common/connection/credentials.py
new file mode 100644
index 0000000000..13d8fe2d7d
--- /dev/null
+++ b/common/ayon_common/connection/credentials.py
@@ -0,0 +1,475 @@
+"""Handle credentials and connection to server for client application.
+
+Cache and store used server urls. Store/load API keys to/from keyring if
+needed. Store metadata about used urls, usernames for the urls and when
+the connection with the username was established.
+
+On bootstrap a global connection is created with information about site
+and client version. The connection object lives in 'ayon_api'.
+""" + +import os +import json +import platform +import datetime +import contextlib +from typing import Optional, Union, Any + +import ayon_api + +from ayon_api.exceptions import UrlError +from ayon_api.utils import ( + validate_url, + is_token_valid, + logout_from_server, +) + +from ayon_common.utils import get_ayon_appdirs, get_local_site_id + + +class ChangeUserResult: + def __init__( + self, logged_out, old_url, old_token, old_username, + new_url, new_token, new_username + ): + shutdown = logged_out + restart = new_url is not None and new_url != old_url + token_changed = new_token is not None and new_token == old_token + + self.logged_out = logged_out + self.old_url = old_url + self.old_token = old_token + self.old_username = old_username + self.new_url = new_url + self.new_token = new_token + self.new_username = new_username + + self.shutdown = shutdown + self.restart = restart + self.token_changed = token_changed + + +def _get_servers_path(): + return get_ayon_appdirs("used_servers.json") + + +def get_servers_info_data(): + """Metadata about used server on this machine. + + Store data about all used server urls, last used url and user username for + the url. Using this metadata we can remember which username was used per + url if token stored in keyring loose lifetime. + + Returns: + dict[str, Any]: Information about servers. + """ + + data = {} + servers_info_path = _get_servers_path() + if not os.path.exists(servers_info_path): + dirpath = os.path.dirname(servers_info_path) + if not os.path.exists(dirpath): + os.makedirs(dirpath) + + return data + + with open(servers_info_path, "r") as stream: + with contextlib.suppress(BaseException): + data = json.load(stream) + return data + + +def add_server(url: str, username: str): + """Add server to server info metadata. + + This function will also mark the url as last used url on the machine so on + next launch will be used. + + Args: + url (str): Server url. + username (str): Name of user used to log in. + """ + + servers_info_path = _get_servers_path() + data = get_servers_info_data() + data["last_server"] = url + if "urls" not in data: + data["urls"] = {} + data["urls"][url] = { + "updated_dt": datetime.datetime.now().strftime("%Y/%m/%d %H:%M:%S"), + "username": username, + } + + with open(servers_info_path, "w") as stream: + json.dump(data, stream) + + +def remove_server(url: str): + """Remove server url from servers information. + + This should be used on logout to completelly loose information about server + on the machine. + + Args: + url (str): Server url. + """ + + if not url: + return + + servers_info_path = _get_servers_path() + data = get_servers_info_data() + if data.get("last_server") == url: + data["last_server"] = None + + if "urls" in data: + data["urls"].pop(url, None) + + with open(servers_info_path, "w") as stream: + json.dump(data, stream) + + +def get_last_server( + data: Optional[dict[str, Any]] = None +) -> Union[str, None]: + """Last server used to log in on this machine. + + Args: + data (Optional[dict[str, Any]]): Prepared server information data. + + Returns: + Union[str, None]: Last used server url. + """ + + if data is None: + data = get_servers_info_data() + return data.get("last_server") + + +def get_last_username_by_url( + url: str, + data: Optional[dict[str, Any]] = None +) -> Union[str, None]: + """Get last username which was used for passed url. + + Args: + url (str): Server url. + data (Optional[dict[str, Any]]): Servers info. + + Returns: + Union[str, None]: Username. 
+ """ + + if not url: + return None + + if data is None: + data = get_servers_info_data() + + if urls := data.get("urls"): + if url_info := urls.get(url): + return url_info.get("username") + return None + + +def get_last_server_with_username(): + """Receive last server and username used in last connection. + + Returns: + tuple[Union[str, None], Union[str, None]]: Url and username. + """ + + data = get_servers_info_data() + url = get_last_server(data) + username = get_last_username_by_url(url) + return url, username + + +class TokenKeyring: + # Fake username with hardcoded username + username_key = "username" + + def __init__(self, url): + try: + import keyring + + except Exception as exc: + raise NotImplementedError( + "Python module `keyring` is not available." + ) from exc + + # hack for cx_freeze and Windows keyring backend + if platform.system().lower() == "windows": + from keyring.backends import Windows + + keyring.set_keyring(Windows.WinVaultKeyring()) + + self._url = url + self._keyring_key = f"AYON/{url}" + + def get_value(self): + import keyring + + return keyring.get_password(self._keyring_key, self.username_key) + + def set_value(self, value): + import keyring + + if value is not None: + keyring.set_password(self._keyring_key, self.username_key, value) + return + + with contextlib.suppress(keyring.errors.PasswordDeleteError): + keyring.delete_password(self._keyring_key, self.username_key) + + +def load_token(url: str) -> Union[str, None]: + """Get token for url from keyring. + + Args: + url (str): Server url. + + Returns: + Union[str, None]: Token for passed url available in keyring. + """ + + return TokenKeyring(url).get_value() + + +def store_token(url: str, token: str): + """Store token by url to keyring. + + Args: + url (str): Server url. + token (str): User token to server. + """ + + TokenKeyring(url).set_value(token) + + +def ask_to_login_ui( + url: Optional[str] = None, + always_on_top: Optional[bool] = False +) -> tuple[str, str, str]: + """Ask user to login using UI. + + This should be used only when user is not yet logged in at all or available + credentials are invalid. To change credentials use 'change_user_ui' + function. + + Args: + url (Optional[str]): Server url that could be prefilled in UI. + always_on_top (Optional[bool]): Window will be drawn on top of + other windows. + + Returns: + tuple[str, str, str]: Url, user's token and username. + """ + + from .ui import ask_to_login + + if url is None: + url = get_last_server() + username = get_last_username_by_url(url) + return ask_to_login(url, username, always_on_top=always_on_top) + + +def change_user_ui() -> ChangeUserResult: + """Change user using UI. + + Show UI to user where he can change credentials or url. Output will contain + all information about old/new values of url, username, api key. If user + confirmed or declined values. + + Returns: + ChangeUserResult: Information about user change. 
+ """ + + from .ui import change_user + + url, username = get_last_server_with_username() + token = load_token(url) + result = change_user(url, username, token) + new_url, new_token, new_username, logged_out = result + + output = ChangeUserResult( + logged_out, url, token, username, + new_url, new_token, new_username + ) + if output.logged_out: + logout(url, token) + + elif output.token_changed: + change_token( + output.new_url, + output.new_token, + output.new_username, + output.old_url + ) + return output + + +def change_token( + url: str, + token: str, + username: Optional[str] = None, + old_url: Optional[str] = None +): + """Change url and token in currently running session. + + Function can also change server url, in that case are previous credentials + NOT removed from cache. + + Args: + url (str): Url to server. + token (str): New token to be used for url connection. + username (Optional[str]): Username of logged user. + old_url (Optional[str]): Previous url. Value from 'get_last_server' + is used if not entered. + """ + + if old_url is None: + old_url = get_last_server() + if old_url and old_url == url: + remove_url_cache(old_url) + + # TODO check if ayon_api is already connected + add_server(url, username) + store_token(url, token) + ayon_api.change_token(url, token) + + +def remove_url_cache(url: str): + """Clear cache for server url. + + Args: + url (str): Server url which is removed from cache. + """ + + store_token(url, None) + + +def remove_token_cache(url: str, token: str): + """Remove token from local cache of url. + + Is skipped if cached token under the passed url is not the same + as passed token. + + Args: + url (str): Url to server. + token (str): Token to be removed from url cache. + """ + + if load_token(url) == token: + remove_url_cache(url) + + +def logout(url: str, token: str): + """Logout from server and throw token away. + + Args: + url (str): Url from which should be logged out. + token (str): Token which should be used to log out. + """ + + remove_server(url) + ayon_api.close_connection() + ayon_api.set_environments(None, None) + remove_token_cache(url, token) + logout_from_server(url, token) + + +def load_environments(): + """Load environments on startup. + + Handle environments needed for connection with server. Environments are + 'AYON_SERVER_URL' and 'AYON_TOKEN'. + + Server is looked up from environment. Already set environent is not + changed. If environemnt is not filled then last server stored in appdirs + is used. + + Token is skipped if url is not available. Otherwise, is also checked from + env and if is not available then uses 'load_token' to try to get token + based on server url. + """ + + server_url = os.environ.get("AYON_SERVER_URL") + if not server_url: + server_url = get_last_server() + if not server_url: + return + os.environ["AYON_SERVER_URL"] = server_url + + if not os.environ.get("AYON_TOKEN"): + if token := load_token(server_url): + os.environ["AYON_TOKEN"] = token + + +def set_environments(url: str, token: str): + """Change url and token environemnts in currently running process. + + Args: + url (str): New server url. + token (str): User's token. + """ + + ayon_api.set_environments(url, token) + + +def create_global_connection(): + """Create global connection with site id and client version. + + + Make sure the global connection in 'ayon_api' have entered site id and + client version. 
+ """ + + if hasattr(ayon_api, "create_connection"): + ayon_api.create_connection( + get_local_site_id(), os.environ.get("AYON_VERSION") + ) + + +def need_server_or_login() -> bool: + """Check if server url or login to the server are needed. + + It is recommended to call 'load_environments' on startup before this check. + But in some cases this function could be called after startup. + + Returns: + bool: 'True' if server and token are available. Otherwise 'False'. + """ + + server_url = os.environ.get("AYON_SERVER_URL") + if not server_url: + return True + + try: + server_url = validate_url(server_url) + except UrlError: + return True + + token = os.environ.get("AYON_TOKEN") + if token: + return not is_token_valid(server_url, token) + + token = load_token(server_url) + return not is_token_valid(server_url, token) + + +def confirm_server_login(url, token, username): + """Confirm login of user and do necessary stepts to apply changes. + + This should not be used on "change" of user but on first login. + + Args: + url (str): Server url where user authenticated. + token (str): API token used for authentication to server. + username (Union[str, None]): Username related to API token. + """ + + add_server(url, username) + store_token(url, token) + set_environments(url, token) + create_global_connection() diff --git a/common/ayon_common/connection/ui/__init__.py b/common/ayon_common/connection/ui/__init__.py new file mode 100644 index 0000000000..96e573df0d --- /dev/null +++ b/common/ayon_common/connection/ui/__init__.py @@ -0,0 +1,12 @@ +from .login_window import ( + ServerLoginWindow, + ask_to_login, + change_user, +) + + +__all__ = ( + "ServerLoginWindow", + "ask_to_login", + "change_user", +) diff --git a/common/ayon_common/connection/ui/lib.py b/common/ayon_common/connection/ui/lib.py new file mode 100644 index 0000000000..e0f0a3d6c2 --- /dev/null +++ b/common/ayon_common/connection/ui/lib.py @@ -0,0 +1,11 @@ +def set_style_property(widget, property_name, property_value): + """Set widget's property that may affect style. + + Style of widget is polished if current property value is different. + """ + + cur_value = widget.property(property_name) + if cur_value == property_value: + return + widget.setProperty(property_name, property_value) + widget.style().polish(widget) diff --git a/common/ayon_common/connection/ui/login_window.py b/common/ayon_common/connection/ui/login_window.py new file mode 100644 index 0000000000..f2604f0466 --- /dev/null +++ b/common/ayon_common/connection/ui/login_window.py @@ -0,0 +1,753 @@ +import traceback + +from Qt import QtWidgets, QtCore, QtGui + +from ayon_api.exceptions import UrlError +from ayon_api.utils import validate_url, login_to_server + +from ayon_common.resources import ( + get_resource_path, + get_icon_path, + load_stylesheet, +) + +from .widgets import ( + PressHoverButton, + PlaceholderLineEdit, +) +from .lib import set_style_property + + +class LogoutConfirmDialog(QtWidgets.QDialog): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + self.setWindowTitle("Logout confirmation") + + message_widget = QtWidgets.QWidget(self) + + message_label = QtWidgets.QLabel( + ( + "You are going to logout. This action will close this" + " application and will invalidate your login." + " All other applications launched with this login won't be" + " able to use it anymore.

" + "You can cancel logout and only change server and user login" + " in login dialog.

" + "Press OK to confirm logout." + ), + message_widget + ) + message_label.setWordWrap(True) + + message_layout = QtWidgets.QHBoxLayout(message_widget) + message_layout.setContentsMargins(0, 0, 0, 0) + message_layout.addWidget(message_label, 1) + + sep_frame = QtWidgets.QFrame(self) + sep_frame.setObjectName("Separator") + sep_frame.setMinimumHeight(2) + sep_frame.setMaximumHeight(2) + + footer_widget = QtWidgets.QWidget(self) + + cancel_btn = QtWidgets.QPushButton("Cancel", footer_widget) + confirm_btn = QtWidgets.QPushButton("OK", footer_widget) + + footer_layout = QtWidgets.QHBoxLayout(footer_widget) + footer_layout.setContentsMargins(0, 0, 0, 0) + footer_layout.addStretch(1) + footer_layout.addWidget(cancel_btn, 0) + footer_layout.addWidget(confirm_btn, 0) + + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.addWidget(message_widget, 0) + main_layout.addStretch(1) + main_layout.addWidget(sep_frame, 0) + main_layout.addWidget(footer_widget, 0) + + cancel_btn.clicked.connect(self._on_cancel_click) + confirm_btn.clicked.connect(self._on_confirm_click) + + self._cancel_btn = cancel_btn + self._confirm_btn = confirm_btn + self._result = False + + def showEvent(self, event): + super().showEvent(event) + self._match_btns_sizes() + + def resizeEvent(self, event): + super().resizeEvent(event) + self._match_btns_sizes() + + def _match_btns_sizes(self): + width = max( + self._cancel_btn.sizeHint().width(), + self._confirm_btn.sizeHint().width() + ) + self._cancel_btn.setMinimumWidth(width) + self._confirm_btn.setMinimumWidth(width) + + def _on_cancel_click(self): + self._result = False + self.reject() + + def _on_confirm_click(self): + self._result = True + self.accept() + + def get_result(self): + return self._result + + +class ServerLoginWindow(QtWidgets.QDialog): + default_width = 410 + default_height = 170 + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + icon_path = get_icon_path() + icon = QtGui.QIcon(icon_path) + self.setWindowIcon(icon) + self.setWindowTitle("Login to server") + + edit_icon_path = get_resource_path("edit.png") + edit_icon = QtGui.QIcon(edit_icon_path) + + # --- URL page --- + login_widget = QtWidgets.QWidget(self) + + user_cred_widget = QtWidgets.QWidget(login_widget) + + url_label = QtWidgets.QLabel("URL:", user_cred_widget) + + url_widget = QtWidgets.QWidget(user_cred_widget) + + url_input = PlaceholderLineEdit(url_widget) + url_input.setPlaceholderText("< https://ayon.server.com >") + + url_preview = QtWidgets.QLineEdit(url_widget) + url_preview.setReadOnly(True) + url_preview.setObjectName("LikeDisabledInput") + + url_edit_btn = PressHoverButton(user_cred_widget) + url_edit_btn.setIcon(edit_icon) + url_edit_btn.setObjectName("PasswordBtn") + + url_layout = QtWidgets.QHBoxLayout(url_widget) + url_layout.setContentsMargins(0, 0, 0, 0) + url_layout.addWidget(url_input, 1) + url_layout.addWidget(url_preview, 1) + + # --- URL separator --- + url_cred_sep = QtWidgets.QFrame(self) + url_cred_sep.setObjectName("Separator") + url_cred_sep.setMinimumHeight(2) + url_cred_sep.setMaximumHeight(2) + + # --- Login page --- + username_label = QtWidgets.QLabel("Username:", user_cred_widget) + + username_widget = QtWidgets.QWidget(user_cred_widget) + + username_input = PlaceholderLineEdit(username_widget) + username_input.setPlaceholderText("< Artist >") + + username_preview = QtWidgets.QLineEdit(username_widget) + username_preview.setReadOnly(True) + username_preview.setObjectName("LikeDisabledInput") + + username_edit_btn = 
PressHoverButton(user_cred_widget) + username_edit_btn.setIcon(edit_icon) + username_edit_btn.setObjectName("PasswordBtn") + + username_layout = QtWidgets.QHBoxLayout(username_widget) + username_layout.setContentsMargins(0, 0, 0, 0) + username_layout.addWidget(username_input, 1) + username_layout.addWidget(username_preview, 1) + + password_label = QtWidgets.QLabel("Password:", user_cred_widget) + password_input = PlaceholderLineEdit(user_cred_widget) + password_input.setPlaceholderText("< *********** >") + password_input.setEchoMode(password_input.Password) + + api_label = QtWidgets.QLabel("API key:", user_cred_widget) + api_preview = QtWidgets.QLineEdit(user_cred_widget) + api_preview.setReadOnly(True) + api_preview.setObjectName("LikeDisabledInput") + + show_password_icon_path = get_resource_path("eye.png") + show_password_icon = QtGui.QIcon(show_password_icon_path) + show_password_btn = PressHoverButton(user_cred_widget) + show_password_btn.setObjectName("PasswordBtn") + show_password_btn.setIcon(show_password_icon) + show_password_btn.setFocusPolicy(QtCore.Qt.ClickFocus) + + cred_msg_sep = QtWidgets.QFrame(self) + cred_msg_sep.setObjectName("Separator") + cred_msg_sep.setMinimumHeight(2) + cred_msg_sep.setMaximumHeight(2) + + # --- Credentials inputs --- + user_cred_layout = QtWidgets.QGridLayout(user_cred_widget) + user_cred_layout.setContentsMargins(0, 0, 0, 0) + row = 0 + + user_cred_layout.addWidget(url_label, row, 0, 1, 1) + user_cred_layout.addWidget(url_widget, row, 1, 1, 1) + user_cred_layout.addWidget(url_edit_btn, row, 2, 1, 1) + row += 1 + + user_cred_layout.addWidget(url_cred_sep, row, 0, 1, 3) + row += 1 + + user_cred_layout.addWidget(username_label, row, 0, 1, 1) + user_cred_layout.addWidget(username_widget, row, 1, 1, 1) + user_cred_layout.addWidget(username_edit_btn, row, 2, 2, 1) + row += 1 + + user_cred_layout.addWidget(api_label, row, 0, 1, 1) + user_cred_layout.addWidget(api_preview, row, 1, 1, 1) + row += 1 + + user_cred_layout.addWidget(password_label, row, 0, 1, 1) + user_cred_layout.addWidget(password_input, row, 1, 1, 1) + user_cred_layout.addWidget(show_password_btn, row, 2, 1, 1) + row += 1 + + user_cred_layout.addWidget(cred_msg_sep, row, 0, 1, 3) + row += 1 + + user_cred_layout.setColumnStretch(0, 0) + user_cred_layout.setColumnStretch(1, 1) + user_cred_layout.setColumnStretch(2, 0) + + login_layout = QtWidgets.QVBoxLayout(login_widget) + login_layout.setContentsMargins(0, 0, 0, 0) + login_layout.addWidget(user_cred_widget, 1) + + # --- Messages --- + # Messages for users (e.g. invalid url etc.) 
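+        # NOTE The message label is expected to render Qt rich text, which
+        # is why '_set_message' joins message lines with '<br/>' tags.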
+ message_label = QtWidgets.QLabel(self) + message_label.setWordWrap(True) + message_label.setTextInteractionFlags(QtCore.Qt.TextBrowserInteraction) + + footer_widget = QtWidgets.QWidget(self) + logout_btn = QtWidgets.QPushButton("Logout", footer_widget) + user_message = QtWidgets.QLabel(footer_widget) + login_btn = QtWidgets.QPushButton("Login", footer_widget) + confirm_btn = QtWidgets.QPushButton("Confirm", footer_widget) + + footer_layout = QtWidgets.QHBoxLayout(footer_widget) + footer_layout.setContentsMargins(0, 0, 0, 0) + footer_layout.addWidget(logout_btn, 0) + footer_layout.addWidget(user_message, 1) + footer_layout.addWidget(login_btn, 0) + footer_layout.addWidget(confirm_btn, 0) + + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.addWidget(login_widget, 0) + main_layout.addWidget(message_label, 0) + main_layout.addStretch(1) + main_layout.addWidget(footer_widget, 0) + + url_input.textChanged.connect(self._on_url_change) + url_input.returnPressed.connect(self._on_url_enter_press) + username_input.textChanged.connect(self._on_user_change) + username_input.returnPressed.connect(self._on_username_enter_press) + password_input.returnPressed.connect(self._on_password_enter_press) + show_password_btn.change_state.connect(self._on_show_password) + url_edit_btn.clicked.connect(self._on_url_edit_click) + username_edit_btn.clicked.connect(self._on_username_edit_click) + logout_btn.clicked.connect(self._on_logout_click) + login_btn.clicked.connect(self._on_login_click) + confirm_btn.clicked.connect(self._on_login_click) + + self._message_label = message_label + + self._url_widget = url_widget + self._url_input = url_input + self._url_preview = url_preview + self._url_edit_btn = url_edit_btn + + self._login_widget = login_widget + + self._user_cred_widget = user_cred_widget + self._username_input = username_input + self._username_preview = username_preview + self._username_edit_btn = username_edit_btn + + self._password_label = password_label + self._password_input = password_input + self._show_password_btn = show_password_btn + self._api_label = api_label + self._api_preview = api_preview + + self._logout_btn = logout_btn + self._user_message = user_message + self._login_btn = login_btn + self._confirm_btn = confirm_btn + + self._url_is_valid = None + self._credentials_are_valid = None + self._result = (None, None, None, False) + self._first_show = True + + self._allow_logout = False + self._logged_in = False + self._url_edit_mode = False + self._username_edit_mode = False + + def set_allow_logout(self, allow_logout): + if allow_logout is self._allow_logout: + return + self._allow_logout = allow_logout + + self._update_states_by_edit_mode() + + def _set_logged_in(self, logged_in): + if logged_in is self._logged_in: + return + self._logged_in = logged_in + + self._update_states_by_edit_mode() + + def _set_url_edit_mode(self, edit_mode): + if self._url_edit_mode is not edit_mode: + self._url_edit_mode = edit_mode + self._update_states_by_edit_mode() + + def _set_username_edit_mode(self, edit_mode): + if self._username_edit_mode is not edit_mode: + self._username_edit_mode = edit_mode + self._update_states_by_edit_mode() + + def _get_url_user_edit(self): + url_edit = True + if self._logged_in and not self._url_edit_mode: + url_edit = False + user_edit = url_edit + if not user_edit and self._logged_in and self._username_edit_mode: + user_edit = True + return url_edit, user_edit + + def _update_states_by_edit_mode(self): + url_edit, user_edit = self._get_url_user_edit() + + 
self._url_preview.setVisible(not url_edit) + self._url_input.setVisible(url_edit) + self._url_edit_btn.setVisible(self._allow_logout and not url_edit) + + self._username_preview.setVisible(not user_edit) + self._username_input.setVisible(user_edit) + self._username_edit_btn.setVisible( + self._allow_logout and not user_edit + ) + + self._api_preview.setVisible(not user_edit) + self._api_label.setVisible(not user_edit) + + self._password_label.setVisible(user_edit) + self._show_password_btn.setVisible(user_edit) + self._password_input.setVisible(user_edit) + + self._logout_btn.setVisible(self._allow_logout and self._logged_in) + self._login_btn.setVisible(not self._allow_logout) + self._confirm_btn.setVisible(self._allow_logout) + self._update_login_btn_state(url_edit, user_edit) + + def _update_login_btn_state(self, url_edit=None, user_edit=None, url=None): + if url_edit is None: + url_edit, user_edit = self._get_url_user_edit() + + if url is None: + url = self._url_input.text() + + enabled = bool(url) and (url_edit or user_edit) + + self._login_btn.setEnabled(enabled) + self._confirm_btn.setEnabled(enabled) + + def showEvent(self, event): + super().showEvent(event) + if self._first_show: + self._first_show = False + self._on_first_show() + + def _on_first_show(self): + self.setStyleSheet(load_stylesheet()) + self.resize(self.default_width, self.default_height) + self._center_window() + if self._allow_logout is None: + self.set_allow_logout(False) + + self._update_states_by_edit_mode() + if not self._url_input.text(): + widget = self._url_input + elif not self._username_input.text(): + widget = self._username_input + else: + widget = self._password_input + + self._set_input_focus(widget) + + def result(self): + """Result url and token or login. + + Returns: + Union[Tuple[str, str], Tuple[None, None]]: Url and token used for + login if was successful otherwise are both set to None. 
+ """ + return self._result + + def _center_window(self): + """Move window to center of screen.""" + + desktop = QtWidgets.QApplication.desktop() + screen_idx = desktop.screenNumber(self) + screen_geo = desktop.screenGeometry(screen_idx) + geo = self.frameGeometry() + geo.moveCenter(screen_geo.center()) + if geo.y() < screen_geo.y(): + geo.setY(screen_geo.y()) + + self.move(geo.topLeft()) + + def _on_url_change(self, text): + self._update_login_btn_state(url=text) + self._set_url_valid(None) + self._set_credentials_valid(None) + self._url_preview.setText(text) + + def _set_url_valid(self, valid): + if valid is self._url_is_valid: + return + + self._url_is_valid = valid + self._set_input_valid_state(self._url_input, valid) + + def _set_credentials_valid(self, valid): + if self._credentials_are_valid is valid: + return + + self._credentials_are_valid = valid + self._set_input_valid_state(self._username_input, valid) + self._set_input_valid_state(self._password_input, valid) + + def _on_url_enter_press(self): + self._set_input_focus(self._username_input) + + def _on_user_change(self, username): + self._username_preview.setText(username) + + def _on_username_enter_press(self): + self._set_input_focus(self._password_input) + + def _on_password_enter_press(self): + self._login() + + def _on_show_password(self, show_password): + if show_password: + placeholder_text = "< MySecret124 >" + echo_mode = QtWidgets.QLineEdit.Normal + else: + placeholder_text = "< *********** >" + echo_mode = QtWidgets.QLineEdit.Password + + self._password_input.setEchoMode(echo_mode) + self._password_input.setPlaceholderText(placeholder_text) + + def _on_username_edit_click(self): + self._username_edit_mode = True + self._update_states_by_edit_mode() + + def _on_url_edit_click(self): + self._url_edit_mode = True + self._update_states_by_edit_mode() + + def _on_logout_click(self): + dialog = LogoutConfirmDialog(self) + dialog.exec_() + if dialog.get_result(): + self._result = (None, None, None, True) + self.accept() + + def _on_login_click(self): + self._login() + + def _validate_url(self): + """Use url from input to connect and change window state on success. + + Todos: + Threaded check. + """ + + url = self._url_input.text() + valid_url = None + try: + valid_url = validate_url(url) + + except UrlError as exc: + parts = [f"{exc.title}"] + parts.extend(f"- {hint}" for hint in exc.hints) + self._set_message("
".join(parts)) + + except KeyboardInterrupt: + # Reraise KeyboardInterrupt error + raise + + except BaseException: + self._set_unexpected_error() + return + + if valid_url is None: + return False + + self._url_input.setText(valid_url) + return True + + def _login(self): + if ( + not self._login_btn.isEnabled() + and not self._confirm_btn.isEnabled() + ): + return + + if not self._url_is_valid: + self._set_url_valid(self._validate_url()) + + if not self._url_is_valid: + self._set_input_focus(self._url_input) + self._set_credentials_valid(None) + return + + self._clear_message() + + url = self._url_input.text() + username = self._username_input.text() + password = self._password_input.text() + try: + token = login_to_server(url, username, password) + except BaseException: + self._set_unexpected_error() + return + + if token is not None: + self._result = (url, token, username, False) + self.accept() + return + + self._set_credentials_valid(False) + message_lines = ["Invalid credentials"] + if not username.strip(): + message_lines.append("- Username is not filled") + + if not password.strip(): + message_lines.append("- Password is not filled") + + if username and password: + message_lines.append("- Check your credentials") + + self._set_message("
".join(message_lines)) + self._set_input_focus(self._username_input) + + def _set_input_focus(self, widget): + widget.setFocus(QtCore.Qt.MouseFocusReason) + + def _set_input_valid_state(self, widget, valid): + state = "" + if valid is True: + state = "valid" + elif valid is False: + state = "invalid" + set_style_property(widget, "state", state) + + def _set_message(self, message): + self._message_label.setText(message) + + def _clear_message(self): + self._message_label.setText("") + + def _set_unexpected_error(self): + # TODO add traceback somewhere + # - maybe a button to show or copy? + traceback.print_exc() + lines = [ + "Unexpected error happened", + "- Can be caused by wrong url (leading elsewhere)" + ] + self._set_message("
".join(lines)) + + def set_url(self, url): + self._url_preview.setText(url) + self._url_input.setText(url) + self._validate_url() + + def set_username(self, username): + self._username_preview.setText(username) + self._username_input.setText(username) + + def _set_api_key(self, api_key): + if not api_key or len(api_key) < 3: + self._api_preview.setText(api_key or "") + return + + api_key_len = len(api_key) + offset = 6 + if api_key_len < offset: + offset = api_key_len // 2 + api_key = api_key[:offset] + "." * (api_key_len - offset) + + self._api_preview.setText(api_key) + + def set_logged_in( + self, + logged_in, + url=None, + username=None, + api_key=None, + allow_logout=None + ): + if url is not None: + self.set_url(url) + + if username is not None: + self.set_username(username) + + if api_key: + self._set_api_key(api_key) + + if logged_in and allow_logout is None: + allow_logout = True + + self._set_logged_in(logged_in) + + if allow_logout: + self.set_allow_logout(True) + elif allow_logout is False: + self.set_allow_logout(False) + + +def ask_to_login(url=None, username=None, always_on_top=False): + """Ask user to login using Qt dialog. + + Function creates new QApplication if is not created yet. + + Args: + url (Optional[str]): Server url that will be prefilled in dialog. + username (Optional[str]): Username that will be prefilled in dialog. + always_on_top (Optional[bool]): Window will be drawn on top of + other windows. + + Returns: + tuple[str, str, str]: Returns Url, user's token and username. Url can + be changed during dialog lifetime that's why the url is returned. + """ + + app_instance = QtWidgets.QApplication.instance() + if app_instance is None: + for attr_name in ( + "AA_EnableHighDpiScaling", + "AA_UseHighDpiPixmaps" + ): + attr = getattr(QtCore.Qt, attr_name, None) + if attr is not None: + QtWidgets.QApplication.setAttribute(attr) + app_instance = QtWidgets.QApplication([]) + + window = ServerLoginWindow() + if always_on_top: + window.setWindowFlags( + window.windowFlags() + | QtCore.Qt.WindowStaysOnTopHint + ) + + if url: + window.set_url(url) + + if username: + window.set_username(username) + + _output = {"out": None} + + def _exec_window(): + window.exec_() + result = window.result() + out_url, out_token, out_username, _logged_out = result + _output["out"] = out_url, out_token, out_username + return _output["out"] + + # Use QTimer to exec dialog if application is not running yet + # - it is not possible to call 'exec_' on dialog without running app + # - it is but the window is stuck + if not app_instance.startingUp(): + return _exec_window() + + timer = QtCore.QTimer() + timer.setSingleShot(True) + timer.timeout.connect(_exec_window) + timer.start() + # This can become main Qt loop. Maybe should live elsewhere + app_instance.exec_() + + return _output["out"] + + +def change_user(url, username, api_key, always_on_top=False): + """Ask user to login using Qt dialog. + + Function creates new QApplication if is not created yet. + + Args: + url (str): Server url that will be prefilled in dialog. + username (str): Username that will be prefilled in dialog. + api_key (str): API key that will be prefilled in dialog. + always_on_top (Optional[bool]): Window will be drawn on top of + other windows. + + Returns: + Tuple[str, str]: Returns Url and user's token. Url can be changed + during dialog lifetime that's why the url is returned. 
+ """ + + app_instance = QtWidgets.QApplication.instance() + if app_instance is None: + for attr_name in ( + "AA_EnableHighDpiScaling", + "AA_UseHighDpiPixmaps" + ): + attr = getattr(QtCore.Qt, attr_name, None) + if attr is not None: + QtWidgets.QApplication.setAttribute(attr) + app_instance = QtWidgets.QApplication([]) + + window = ServerLoginWindow() + if always_on_top: + window.setWindowFlags( + window.windowFlags() + | QtCore.Qt.WindowStaysOnTopHint + ) + window.set_logged_in(True, url, username, api_key) + + _output = {"out": None} + + def _exec_window(): + window.exec_() + _output["out"] = window.result() + return _output["out"] + + # Use QTimer to exec dialog if application is not running yet + # - it is not possible to call 'exec_' on dialog without running app + # - it is but the window is stuck + if not app_instance.startingUp(): + return _exec_window() + + timer = QtCore.QTimer() + timer.setSingleShot(True) + timer.timeout.connect(_exec_window) + timer.start() + # This can become main Qt loop. Maybe should live elsewhere + app_instance.exec_() + return _output["out"] diff --git a/common/ayon_common/connection/ui/widgets.py b/common/ayon_common/connection/ui/widgets.py new file mode 100644 index 0000000000..04c6a8e5f2 --- /dev/null +++ b/common/ayon_common/connection/ui/widgets.py @@ -0,0 +1,47 @@ +from Qt import QtWidgets, QtCore, QtGui + + +class PressHoverButton(QtWidgets.QPushButton): + """Keep track about mouse press/release and enter/leave.""" + + _mouse_pressed = False + _mouse_hovered = False + change_state = QtCore.Signal(bool) + + def mousePressEvent(self, event): + self._mouse_pressed = True + self._mouse_hovered = True + self.change_state.emit(self._mouse_hovered) + super(PressHoverButton, self).mousePressEvent(event) + + def mouseReleaseEvent(self, event): + self._mouse_pressed = False + self._mouse_hovered = False + self.change_state.emit(self._mouse_hovered) + super(PressHoverButton, self).mouseReleaseEvent(event) + + def mouseMoveEvent(self, event): + mouse_pos = self.mapFromGlobal(QtGui.QCursor.pos()) + under_mouse = self.rect().contains(mouse_pos) + if under_mouse != self._mouse_hovered: + self._mouse_hovered = under_mouse + self.change_state.emit(self._mouse_hovered) + + super(PressHoverButton, self).mouseMoveEvent(event) + + +class PlaceholderLineEdit(QtWidgets.QLineEdit): + """Set placeholder color of QLineEdit in Qt 5.12 and higher.""" + + def __init__(self, *args, **kwargs): + super(PlaceholderLineEdit, self).__init__(*args, **kwargs) + # Change placeholder palette color + if hasattr(QtGui.QPalette, "PlaceholderText"): + filter_palette = self.palette() + color = QtGui.QColor("#D3D8DE") + color.setAlpha(67) + filter_palette.setColor( + QtGui.QPalette.PlaceholderText, + color + ) + self.setPalette(filter_palette) diff --git a/common/openpype_common/distribution/README.md b/common/ayon_common/distribution/README.md similarity index 100% rename from common/openpype_common/distribution/README.md rename to common/ayon_common/distribution/README.md diff --git a/common/ayon_common/distribution/__init__.py b/common/ayon_common/distribution/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/common/ayon_common/distribution/addon_distribution.py b/common/ayon_common/distribution/addon_distribution.py new file mode 100644 index 0000000000..dba6c20193 --- /dev/null +++ b/common/ayon_common/distribution/addon_distribution.py @@ -0,0 +1,1189 @@ +import os +import sys +import json +import traceback +import collections +import datetime +from enum 
import Enum +from abc import abstractmethod +import attr +import logging +import platform +import shutil +import threading +from abc import ABCMeta + +import ayon_api + +from ayon_common.utils import get_ayon_appdirs +from .file_handler import RemoteFileHandler +from .addon_info import ( + AddonInfo, + UrlType, + DependencyItem, +) + + +class UpdateState(Enum): + UNKNOWN = "unknown" + UPDATED = "udated" + OUTDATED = "outdated" + UPDATE_FAILED = "failed" + MISS_SOURCE_FILES = "miss_source_files" + + +def get_local_dir(*subdirs): + """Get product directory in user's home directory. + + Each user on machine have own local directory where are downloaded updates, + addons etc. + + Returns: + str: Path to product local directory. + """ + + if not subdirs: + raise ValueError("Must fill dir_name if nothing else provided!") + + local_dir = get_ayon_appdirs(*subdirs) + if not os.path.isdir(local_dir): + try: + os.makedirs(local_dir) + except Exception: # TODO fix exception + raise RuntimeError(f"Cannot create {local_dir}") + + return local_dir + + +def get_addons_dir(): + """Directory where addon packages are stored. + + Path to addons is defined using python module 'appdirs' which + + The path is stored into environment variable 'AYON_ADDONS_DIR'. + Value of environment variable can be overriden, but we highly recommended + to use that option only for development purposes. + + Returns: + str: Path to directory where addons should be downloaded. + """ + + addons_dir = os.environ.get("AYON_ADDONS_DIR") + if not addons_dir: + addons_dir = get_local_dir("addons") + os.environ["AYON_ADDONS_DIR"] = addons_dir + return addons_dir + + +def get_dependencies_dir(): + """Directory where dependency packages are stored. + + Path to addons is defined using python module 'appdirs' which + + The path is stored into environment variable 'AYON_DEPENDENCIES_DIR'. + Value of environment variable can be overriden, but we highly recommended + to use that option only for development purposes. + + Returns: + str: Path to directory where dependency packages should be downloaded. + """ + + dependencies_dir = os.environ.get("AYON_DEPENDENCIES_DIR") + if not dependencies_dir: + dependencies_dir = get_local_dir("dependency_packages") + os.environ["AYON_DEPENDENCIES_DIR"] = dependencies_dir + return dependencies_dir + + +class SourceDownloader(metaclass=ABCMeta): + log = logging.getLogger(__name__) + + @classmethod + @abstractmethod + def download(cls, source, destination_dir, data, transfer_progress): + """Returns url to downloaded addon zip file. + + Tranfer progress can be ignored, in that case file transfer won't + be shown as 0-100% but as 'running'. First step should be to set + destination content size and then add transferred chunk sizes. + + Args: + source (dict): {type:"http", "url":"https://} ...} + destination_dir (str): local folder to unzip + data (dict): More information about download content. Always have + 'type' key in. + transfer_progress (ayon_api.TransferProgress): Progress of + transferred (copy/download) content. + + Returns: + (str) local path to addon zip file + """ + + pass + + @classmethod + @abstractmethod + def cleanup(cls, source, destination_dir, data): + """Cleanup files when distribution finishes or crashes. + + Cleanup e.g. temporary files (downloaded zip) or other related stuff + to downloader. + """ + + pass + + @classmethod + def check_hash(cls, addon_path, addon_hash, hash_type="sha256"): + """Compares 'hash' of downloaded 'addon_url' file. 
+ + Args: + addon_path (str): Local path to addon file. + addon_hash (str): Hash of downloaded file. + hash_type (str): Type of hash. + + Raises: + ValueError if hashes doesn't match + """ + + if not os.path.exists(addon_path): + raise ValueError(f"{addon_path} doesn't exist.") + if not RemoteFileHandler.check_integrity(addon_path, + addon_hash, + hash_type=hash_type): + raise ValueError(f"{addon_path} doesn't match expected hash.") + + @classmethod + def unzip(cls, addon_zip_path, destination_dir): + """Unzips local 'addon_zip_path' to 'destination'. + + Args: + addon_zip_path (str): local path to addon zip file + destination_dir (str): local folder to unzip + """ + + RemoteFileHandler.unzip(addon_zip_path, destination_dir) + os.remove(addon_zip_path) + + +class DownloadFactory: + def __init__(self): + self._downloaders = {} + + def register_format(self, downloader_type, downloader): + """Register downloader for download type. + + Args: + downloader_type (UrlType): Type of source. + downloader (SourceDownloader): Downloader which cares about + download, hash check and unzipping. + """ + + self._downloaders[downloader_type.value] = downloader + + def get_downloader(self, downloader_type): + """Registered downloader for type. + + Args: + downloader_type (UrlType): Type of source. + + Returns: + SourceDownloader: Downloader object which should care about file + distribution. + + Raises: + ValueError: If type does not have registered downloader. + """ + + if downloader := self._downloaders.get(downloader_type): + return downloader() + raise ValueError(f"{downloader_type} not implemented") + + +class OSDownloader(SourceDownloader): + @classmethod + def download(cls, source, destination_dir, data, transfer_progress): + # OS doesn't need to download, unzip directly + addon_url = source["path"].get(platform.system().lower()) + if not os.path.exists(addon_url): + raise ValueError(f"{addon_url} is not accessible") + return addon_url + + @classmethod + def cleanup(cls, source, destination_dir, data): + # Nothing to do - download does not copy anything + pass + + +class HTTPDownloader(SourceDownloader): + CHUNK_SIZE = 100000 + + @staticmethod + def get_filename(source): + source_url = source["url"] + filename = source.get("filename") + if not filename: + filename = os.path.basename(source_url) + basename, ext = os.path.splitext(filename) + allowed_exts = set(RemoteFileHandler.IMPLEMENTED_ZIP_FORMATS) + if ext.replace(".", "") not in allowed_exts: + filename = f"{basename}.zip" + return filename + + @classmethod + def download(cls, source, destination_dir, data, transfer_progress): + source_url = source["url"] + cls.log.debug(f"Downloading {source_url} to {destination_dir}") + headers = source.get("headers") + filename = cls.get_filename(source) + + # TODO use transfer progress + RemoteFileHandler.download_url( + source_url, + destination_dir, + filename, + headers=headers + ) + + return os.path.join(destination_dir, filename) + + @classmethod + def cleanup(cls, source, destination_dir, data): + # Nothing to do - download does not copy anything + filename = cls.get_filename(source) + filepath = os.path.join(destination_dir, filename) + if os.path.exists(filepath) and os.path.isfile(filepath): + os.remove(filepath) + + +class AyonServerDownloader(SourceDownloader): + """Downloads static resource file from v4 Server. + + Expects filled env var AYON_SERVER_URL. 
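+
+    Files are downloaded through the authenticated 'ayon_api' connection
+    (server resource files, dependency packages or addon private files).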
+ """ + + CHUNK_SIZE = 8192 + + @classmethod + def download(cls, source, destination_dir, data, transfer_progress): + path = source["path"] + filename = source["filename"] + if path and not filename: + filename = path.split("/")[-1] + + cls.log.debug(f"Downloading {filename} to {destination_dir}") + + _, ext = os.path.splitext(filename) + clear_ext = ext.lower().replace(".", "") + valid_exts = set(RemoteFileHandler.IMPLEMENTED_ZIP_FORMATS) + if clear_ext not in valid_exts: + raise ValueError( + "Invalid file extension \"{}\". Expected {}".format( + clear_ext, ", ".join(valid_exts) + )) + + if path: + filepath = os.path.join(destination_dir, filename) + return ayon_api.download_file( + path, + filepath, + chunk_size=cls.CHUNK_SIZE, + progress=transfer_progress + ) + + # dst_filepath = os.path.join(destination_dir, filename) + if data["type"] == "dependency_package": + return ayon_api.download_dependency_package( + data["name"], + destination_dir, + filename, + platform_name=data["platform"], + chunk_size=cls.CHUNK_SIZE, + progress=transfer_progress + ) + + if data["type"] == "addon": + return ayon_api.download_addon_private_file( + data["name"], + data["version"], + filename, + destination_dir, + chunk_size=cls.CHUNK_SIZE, + progress=transfer_progress + ) + + raise ValueError(f"Unknown type to download \"{data['type']}\"") + + @classmethod + def cleanup(cls, source, destination_dir, data): + # Nothing to do - download does not copy anything + filename = source["filename"] + filepath = os.path.join(destination_dir, filename) + if os.path.exists(filepath) and os.path.isfile(filepath): + os.remove(filepath) + + +def get_addons_info(): + """Returns list of addon information from Server + + Returns: + List[AddonInfo]: List of metadata for addons sent from server, + parsed in AddonInfo objects + """ + + addons_info = [] + for addon in ayon_api.get_addons_info(details=True)["addons"]: + addon_info = AddonInfo.from_dict(addon) + if addon_info is not None: + addons_info.append(addon_info) + return addons_info + + +def get_dependency_package(package_name=None): + """Returns info about currently used dependency package. + + Dependency package means .venv created from all activated addons from the + server (plus libraries for core Tray app TODO confirm). + This package needs to be downloaded, unpacked and added to sys.path for + Tray app to work. + + Args: + package_name (str): Name of package. Production package name is used + if not entered. + + Returns: + Union[DependencyItem, None]: Item or None if package with the name was + not found. + """ + + dependencies_info = ayon_api.get_dependencies_info() + + dependency_list = dependencies_info["packages"] + # Use production package if package is not specified + if package_name is None: + package_name = dependencies_info["productionPackage"] + + for dependency in dependency_list: + dependency_package = DependencyItem.from_dict(dependency) + if dependency_package.name == package_name: + return dependency_package + + +class DistributeTransferProgress: + """Progress of single source item in 'DistributionItem'. + + The item is to keep track of single source item. 
+class DistributeTransferProgress:
+    """Progress of single source item in 'DistributionItem'.
+
+    The object keeps track of a single source item.
+    """
+
+    def __init__(self):
+        self._transfer_progress = ayon_api.TransferProgress()
+        self._started = False
+        self._failed = False
+        self._fail_reason = None
+        self._unzip_started = False
+        self._unzip_finished = False
+        self._hash_check_started = False
+        self._hash_check_finished = False
+
+    def set_started(self):
+        """Call when source distribution starts."""
+
+        self._started = True
+
+    def set_failed(self, reason):
+        """Set source distribution as failed.
+
+        Args:
+            reason (str): Error message why the transfer failed.
+        """
+
+        self._failed = True
+        self._fail_reason = reason
+
+    def set_hash_check_started(self):
+        """Call just before hash check starts."""
+
+        self._hash_check_started = True
+
+    def set_hash_check_finished(self):
+        """Call just after hash check finishes."""
+
+        self._hash_check_finished = True
+
+    def set_unzip_started(self):
+        """Call just before unzip starts."""
+
+        self._unzip_started = True
+
+    def set_unzip_finished(self):
+        """Call just after unzip finishes."""
+
+        self._unzip_finished = True
+
+    @property
+    def is_running(self):
+        """Source distribution is in progress.
+
+        Returns:
+            bool: Transfer is in progress.
+        """
+
+        return bool(
+            self._started
+            and not self._failed
+            and not self._hash_check_finished
+        )
+
+    @property
+    def transfer_progress(self):
+        """Source file 'download' progress tracker.
+
+        Returns:
+            ayon_api.TransferProgress: Content download progress.
+        """
+
+        return self._transfer_progress
+
+    @property
+    def started(self):
+        return self._started
+
+    @property
+    def hash_check_started(self):
+        return self._hash_check_started
+
+    @property
+    def hash_check_finished(self):
+        return self._hash_check_finished
+
+    @property
+    def unzip_started(self):
+        return self._unzip_started
+
+    @property
+    def unzip_finished(self):
+        return self._unzip_finished
+
+    @property
+    def failed(self):
+        return self._failed or self._transfer_progress.failed
+
+    @property
+    def fail_reason(self):
+        return self._fail_reason or self._transfer_progress.fail_reason
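UI or supervising code is expected to poll these trackers while distribution runs in a background thread. A rough sketch using only attributes defined above; note that with the current 'is_running' definition the loop ends once the hash check finishes:

```python
import time


def wait_for_source(progress, poll_interval=0.2):
    # 'progress' is assumed to be the 'current_source_progress' of a
    # DistributionItem being distributed in another thread.
    while progress.is_running:
        phase = (
            "checking hash" if progress.hash_check_started else "downloading"
        )
        print(f"Source is {phase}...")
        time.sleep(poll_interval)

    if progress.failed:
        print(f"Source failed: {progress.fail_reason}")
```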
+
+
+class DistributionItem:
+    """Distribution item with sources and target directories.
+
+    A distribution item can be an addon or a dependency package. The item may
+    already be distributed and need no processing, but it still keeps track
+    of the progress so distribution items can be used as source data for UI
+    without reimplementing the same logic.
+
+    Distribution is "state" based: an item is either 'UPDATED' or 'OUTDATED'
+    at initialization. If the item is 'UPDATED' the distribution is skipped,
+    'OUTDATED' will trigger the distribution process.
+
+    Because the distribution may have multiple sources, each source has its
+    own progress item.
+
+    Args:
+        state (UpdateState): Initial state (UpdateState.UPDATED or
+            UpdateState.OUTDATED).
+        unzip_dirpath (str): Path to directory where the content is unzipped.
+        download_dirpath (str): Path to directory where the file is
+            downloaded.
+        file_hash (str): Hash of file for validation.
+        factory (DownloadFactory): Downloaders factory object.
+        sources (List[SourceInfo]): Possible sources to receive the
+            distribution item.
+        downloader_data (Dict[str, Any]): More information for downloaders.
+        item_label (str): Label used in log outputs (and in UI).
+        logger (logging.Logger): Logger object.
+    """
+
+    def __init__(
+        self,
+        state,
+        unzip_dirpath,
+        download_dirpath,
+        file_hash,
+        factory,
+        sources,
+        downloader_data,
+        item_label,
+        logger=None,
+    ):
+        if logger is None:
+            logger = logging.getLogger(self.__class__.__name__)
+        self.log = logger
+        self.state = state
+        self.unzip_dirpath = unzip_dirpath
+        self.download_dirpath = download_dirpath
+        self.file_hash = file_hash
+        self.factory = factory
+        self.sources = [
+            (source, DistributeTransferProgress())
+            for source in sources
+        ]
+        self.downloader_data = downloader_data
+        self.item_label = item_label
+
+        self._need_distribution = state != UpdateState.UPDATED
+        self._current_source_progress = None
+        self._used_source_progress = None
+        self._used_source = None
+        self._dist_started = False
+        self._dist_finished = False
+
+        self._error_msg = None
+        self._error_detail = None
+
+    @property
+    def need_distribution(self):
+        """Need distribution based on initial state.
+
+        Returns:
+            bool: Need distribution.
+        """
+
+        return self._need_distribution
+
+    @property
+    def current_source_progress(self):
+        """Currently processed source progress object.
+
+        Returns:
+            Union[DistributeTransferProgress, None]: Transfer progress
+                or None.
+        """
+
+        return self._current_source_progress
+
+    @property
+    def used_source_progress(self):
+        """Transfer progress that successfully distributed the item.
+
+        Returns:
+            Union[DistributeTransferProgress, None]: Transfer progress
+                or None.
+        """
+
+        return self._used_source_progress
+
+    @property
+    def used_source(self):
+        """Data of source item.
+
+        Returns:
+            Union[Dict[str, Any], None]: SourceInfo data or None.
+        """
+
+        return self._used_source
+
+    @property
+    def error_message(self):
+        """Reason why distribution item failed.
+
+        Returns:
+            Union[str, None]: Error message.
+        """
+
+        return self._error_msg
+
+    @property
+    def error_detail(self):
+        """Detailed reason why distribution item failed.
+
+        Returns:
+            Union[str, None]: Detailed information (maybe traceback).
+        """
+
+        return self._error_detail
+
+    def _distribute(self):
+        if not self.sources:
+            message = (
+                f"{self.item_label}: Don't have"
+                " any sources to download from."
+            )
+            self.log.error(message)
+            self._error_msg = message
+            self.state = UpdateState.MISS_SOURCE_FILES
+            return
+
+        download_dirpath = self.download_dirpath
+        unzip_dirpath = self.unzip_dirpath
+        for source, source_progress in self.sources:
+            self._current_source_progress = source_progress
+            source_progress.set_started()
+
+            # Remove directory if exists
+            if os.path.isdir(unzip_dirpath):
+                self.log.debug(f"Cleaning {unzip_dirpath}")
+                shutil.rmtree(unzip_dirpath)
+
+            # Create directory
+            os.makedirs(unzip_dirpath)
+            if not os.path.isdir(download_dirpath):
+                os.makedirs(download_dirpath)
+
+            try:
+                downloader = self.factory.get_downloader(source.type)
+            except Exception:
+                message = f"Unknown downloader {source.type}"
+                source_progress.set_failed(message)
+                self.log.warning(
+                    f"{self.item_label}: {message}",
+                    exc_info=True
+                )
+                continue
+
+            source_data = attr.asdict(source)
+            cleanup_args = (
+                source_data,
+                download_dirpath,
+                self.downloader_data
+            )
+
+            try:
+                zip_filepath = downloader.download(
+                    source_data,
+                    download_dirpath,
+                    self.downloader_data,
+                    source_progress.transfer_progress,
+                )
+            except Exception:
+                message = "Failed to download source"
+                source_progress.set_failed(message)
+                self.log.warning(
+                    f"{self.item_label}: {message}",
+                    exc_info=True
+                )
+                downloader.cleanup(*cleanup_args)
+                continue
+
+            source_progress.set_hash_check_started()
+            try:
+                downloader.check_hash(zip_filepath, self.file_hash)
+            except Exception:
+                message = "File hash does not match"
+                source_progress.set_failed(message)
+                self.log.warning(
+                    f"{self.item_label}: {message}",
+                    exc_info=True
+                )
+                downloader.cleanup(*cleanup_args)
+                continue
+
+            source_progress.set_hash_check_finished()
+            source_progress.set_unzip_started()
+            try:
+                downloader.unzip(zip_filepath, unzip_dirpath)
+            except Exception:
+                message = "Couldn't unzip source file"
+                source_progress.set_failed(message)
+                self.log.warning(
+                    f"{self.item_label}: {message}",
+                    exc_info=True
+                )
+                downloader.cleanup(*cleanup_args)
+                continue
+
+            source_progress.set_unzip_finished()
+            downloader.cleanup(*cleanup_args)
+            self.state = UpdateState.UPDATED
+            self._used_source = source_data
+            break
+
+        last_progress = self._current_source_progress
+        self._current_source_progress = None
+        if self.state == UpdateState.UPDATED:
+            self._used_source_progress = last_progress
+            self.log.info(f"{self.item_label}: Distributed")
+            return
+
+        self.log.error(f"{self.item_label}: Failed to distribute")
+        self._error_msg = "Failed to receive or install source files"
+
+    def distribute(self):
+        """Execute distribution logic."""
+
+        if not self.need_distribution or self._dist_started:
+            return
+
+        self._dist_started = True
+        try:
+            if self.state == UpdateState.OUTDATED:
+                self._distribute()
+
+        except Exception as exc:
+            self.state = UpdateState.UPDATE_FAILED
+            self._error_msg = str(exc)
+            self._error_detail = "".join(
+                traceback.format_exception(*sys.exc_info())
+            )
+            self.log.error(
+                f"{self.item_label}: Distribution failed",
+                exc_info=True
+            )
+
+        finally:
+            self._dist_finished = True
+            if self.state == UpdateState.OUTDATED:
+                self.state = UpdateState.UPDATE_FAILED
+                self._error_msg = "Distribution failed"
+
+            if (
+                self.state != UpdateState.UPDATED
+                and self.unzip_dirpath
+                and os.path.isdir(self.unzip_dirpath)
+            ):
+                self.log.debug(f"Cleaning {self.unzip_dirpath}")
+                shutil.rmtree(self.unzip_dirpath)
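A minimal sketch of driving a single item by hand, assuming 'addon_info' is an 'AddonInfo' parsed from the server payload; the local target path is made up for the example:

```python
import logging

target_dir = "/local/ayon/addons/openpype_slack_1.0.0"  # illustrative path
item = DistributionItem(
    UpdateState.OUTDATED,
    target_dir,  # unzip_dirpath
    target_dir,  # download_dirpath (AyonDistribution uses one dir for addons)
    addon_info.hash,
    get_default_download_factory(),
    list(addon_info.sources),
    {
        "type": "addon",
        "name": addon_info.name,
        "version": addon_info.version,
    },
    addon_info.full_name,
    logging.getLogger("distribution"),
)
item.distribute()
if item.state != UpdateState.UPDATED:
    print(item.error_message)
```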
+
+
+class AyonDistribution:
+    """Distribution control.
+
+    Receives information from the server about which addons and dependency
+    packages should be available locally, and prepares/validates their
+    distribution.
+
+    Arguments exist mainly to make the class testable.
+
+    Args:
+        addon_dirpath (str): Where addons will be stored.
+        dependency_dirpath (str): Where dependencies will be stored.
+        dist_factory (DownloadFactory): Factory which cares about downloading
+            of items based on source type.
+        addons_info (List[AddonInfo]): List of prepared addons info.
+        dependency_package_info (Union[Dict[str, Any], None]): Dependency
+            package info from server. The default '-1' is a sentinel which
+            means the info is queried from server on first use.
+    """
+
+    def __init__(
+        self,
+        addon_dirpath=None,
+        dependency_dirpath=None,
+        dist_factory=None,
+        addons_info=None,
+        dependency_package_info=-1,
+    ):
+        self._addons_dirpath = addon_dirpath or get_addons_dir()
+        self._dependency_dirpath = dependency_dirpath or get_dependencies_dir()
+        self._dist_factory = (
+            dist_factory or get_default_download_factory()
+        )
+
+        if isinstance(addons_info, list):
+            addons_info = {item.full_name: item for item in addons_info}
+        self._dist_started = False
+        self._dist_finished = False
+        self._log = None
+        self._addons_info = addons_info
+        self._addons_dist_items = None
+        self._dependency_package = dependency_package_info
+        self._dependency_dist_item = -1
+
+    @property
+    def log(self):
+        if self._log is None:
+            self._log = logging.getLogger(self.__class__.__name__)
+        return self._log
+
+    @property
+    def addons_info(self):
+        """Information about available addons on server.
+
+        Addons may require distribution of files. A 'DistributionItem'
+        handling the distribution is created for each of those addons.
+
+        Todos:
+            Add support for staging versions. Right now only the production
+            version is supported.
+
+        Returns:
+            Dict[str, AddonInfo]: Addon info by full name.
+        """
+
+        if self._addons_info is None:
+            addons_info = {}
+            server_addons_info = ayon_api.get_addons_info(details=True)
+            for addon in server_addons_info["addons"]:
+                addon_info = AddonInfo.from_dict(addon)
+                if addon_info is None:
+                    continue
+                addons_info[addon_info.full_name] = addon_info
+
+            self._addons_info = addons_info
+        return self._addons_info
+
+    @property
+    def dependency_package(self):
+        """Information about dependency package from server.
+
+        Receive and cache dependency package information from server.
+
+        Notes:
+            For testing purposes it is possible to pass dependency package
+            information to '__init__'.
+
+        Returns:
+            Union[None, Dict[str, Any]]: None if server does not have
+                specified dependency package.
+        """
+
+        if self._dependency_package == -1:
+            self._dependency_package = get_dependency_package()
+        return self._dependency_package
+
+    def _prepare_current_addons_dist_items(self):
+        addons_metadata = self.get_addons_metadata()
+        output = {}
+        for full_name, addon_info in self.addons_info.items():
+            if not addon_info.require_distribution:
+                continue
+            addon_dest = os.path.join(self._addons_dirpath, full_name)
+            self.log.debug(f"Checking {full_name} in {addon_dest}")
+            addon_in_metadata = (
+                addon_info.name in addons_metadata
+                and addon_info.version in addons_metadata[addon_info.name]
+            )
+            if addon_in_metadata and os.path.isdir(addon_dest):
+                self.log.debug(
+                    f"Addon version folder {addon_dest} already exists."
+                )
+                state = UpdateState.UPDATED
+
+            else:
+                state = UpdateState.OUTDATED
+
+            downloader_data = {
+                "type": "addon",
+                "name": addon_info.name,
+                "version": addon_info.version
+            }
+
+            output[full_name] = DistributionItem(
+                state,
+                addon_dest,
+                addon_dest,
+                addon_info.hash,
+                self._dist_factory,
+                list(addon_info.sources),
+                downloader_data,
+                full_name,
+                self.log
+            )
+        return output
+
+    def _prepare_dependency_progress(self):
+        package = self.dependency_package
+        if package is None or not package.require_distribution:
+            return None
+
+        metadata = self.get_dependency_metadata()
+        downloader_data = {
+            "type": "dependency_package",
+            "name": package.name,
+            "platform": package.platform
+        }
+        zip_dir = package_dir = os.path.join(
+            self._dependency_dirpath, package.name
+        )
+        self.log.debug(f"Checking {package.name} in {package_dir}")
+
+        if not os.path.isdir(package_dir) or package.name not in metadata:
+            state = UpdateState.OUTDATED
+        else:
+            state = UpdateState.UPDATED
+
+        return DistributionItem(
+            state,
+            zip_dir,
+            package_dir,
+            package.checksum,
+            self._dist_factory,
+            package.sources,
+            downloader_data,
+            package.name,
+            self.log,
+        )
+
+    def get_addons_dist_items(self):
+        """Addon distribution items.
+
+        These items describe the source files an addon requires to have
+        available on the machine. Each item may have 0-n sources from which
+        the files can be obtained. If the files are already available, the
+        item's state will be 'UPDATED'.
+
+        Returns:
+            Dict[str, DistributionItem]: Distribution items by addon
+                full name.
+        """
+
+        if self._addons_dist_items is None:
+            self._addons_dist_items = self._prepare_current_addons_dist_items()
+        return self._addons_dist_items
+
+    def get_dependency_dist_item(self):
+        """Dependency package distribution item.
+
+        The item describes the source files the server requires to have
+        available on the machine. The item may have 0-n sources from which
+        the files can be obtained. If the files are already available, the
+        item's state will be 'UPDATED'.
+
+        'None' is returned if the server does not have any dependency
+        package defined.
+
+        Returns:
+            Union[None, DistributionItem]: Dependency item or None if server
+                does not have specified any dependency package.
+        """
+
+        if self._dependency_dist_item == -1:
+            self._dependency_dist_item = self._prepare_dependency_progress()
+        return self._dependency_dist_item
+
+    def get_dependency_metadata_filepath(self):
+        """Path to distribution metadata file.
+
+        Metadata contain information about distributed packages: the used
+        source, expected file hash and time when the file was distributed.
+
+        Returns:
+            str: Path to a file where dependency package metadata are stored.
+        """
+
+        return os.path.join(self._dependency_dirpath, "dependency.json")
+
+    def get_addons_metadata_filepath(self):
+        """Path to addons metadata file.
+
+        Metadata contain information about distributed addons: the used
+        sources, expected file hashes and times when the files were
+        distributed.
+
+        Returns:
+            str: Path to a file where addons metadata are stored.
+        """
+
+        return os.path.join(self._addons_dirpath, "addons.json")
+
+    def read_metadata_file(self, filepath, default_value=None):
+        """Read json file from path.
+
+        The default value is returned when the file does not exist or does
+        not contain valid json.
+
+        Args:
+            filepath (str): Path to json file.
+            default_value (Union[Dict[str, Any], List[Any], None]): Default
+                value if the file is not available (or valid).
+
+        Returns:
+            Union[Dict[str, Any], List[Any]]: Value from file.
+        """
+
+        if default_value is None:
+            default_value = {}
+
+        if not os.path.exists(filepath):
+            return default_value
+
+        try:
+            with open(filepath, "r") as stream:
+                data = json.load(stream)
+        except ValueError:
+            data = default_value
+        return data
+
+    def save_metadata_file(self, filepath, data):
+        """Store data to json file.
+
+        The file (and its parent directory) is created when it does not
+        exist.
+
+        Args:
+            filepath (str): Path to json file.
+            data (Union[Dict[str, Any], List[Any]]): Data to store into file.
+        """
+
+        if not os.path.exists(filepath):
+            dirpath = os.path.dirname(filepath)
+            if not os.path.exists(dirpath):
+                os.makedirs(dirpath)
+        with open(filepath, "w") as stream:
+            json.dump(data, stream, indent=4)
+
+    def get_dependency_metadata(self):
+        filepath = self.get_dependency_metadata_filepath()
+        return self.read_metadata_file(filepath, {})
+
+    def update_dependency_metadata(self, package_name, data):
+        dependency_metadata = self.get_dependency_metadata()
+        dependency_metadata[package_name] = data
+        filepath = self.get_dependency_metadata_filepath()
+        self.save_metadata_file(filepath, dependency_metadata)
+
+    def get_addons_metadata(self):
+        filepath = self.get_addons_metadata_filepath()
+        return self.read_metadata_file(filepath, {})
+
+    def update_addons_metadata(self, addons_information):
+        if not addons_information:
+            return
+        addons_metadata = self.get_addons_metadata()
+        for addon_name, version_value in addons_information.items():
+            if addon_name not in addons_metadata:
+                addons_metadata[addon_name] = {}
+            for addon_version, version_data in version_value.items():
+                addons_metadata[addon_name][addon_version] = version_data
+
+        filepath = self.get_addons_metadata_filepath()
+        self.save_metadata_file(filepath, addons_metadata)
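For reference, the 'addons.json' file maintained by the helpers above ends up shaped roughly as below; the addon name, version, source and hash values are illustrative:

```python
# Illustrative shape of "addons.json" after one successful distribution.
# "source" is the attr.asdict() of the SourceInfo that was actually used.
{
    "openpype_slack": {
        "1.0.0": {
            "source": {
                "type": "server",
                "filename": "slack.zip",
                "path": None
            },
            "file_hash": "4be25eb6215e91e5894d3c5475aeb1e3...",
            "distributed_dt": "2023-02-01 12:00:00"
        }
    }
}
```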
+
+    def finish_distribution(self):
+        """Store metadata about distributed items."""
+
+        self._dist_finished = True
+        stored_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        dependency_dist_item = self.get_dependency_dist_item()
+        if (
+            dependency_dist_item is not None
+            and dependency_dist_item.need_distribution
+            and dependency_dist_item.state == UpdateState.UPDATED
+        ):
+            package = self.dependency_package
+            source = dependency_dist_item.used_source
+            if source is not None:
+                data = {
+                    "source": source,
+                    "file_hash": dependency_dist_item.file_hash,
+                    "distributed_dt": stored_time
+                }
+                self.update_dependency_metadata(package.name, data)
+
+        addons_info = {}
+        for full_name, dist_item in self.get_addons_dist_items().items():
+            if (
+                not dist_item.need_distribution
+                or dist_item.state != UpdateState.UPDATED
+            ):
+                continue
+
+            source_data = dist_item.used_source
+            if not source_data:
+                continue
+            addon_info = self.addons_info[full_name]
+            if addon_info.name not in addons_info:
+                addons_info[addon_info.name] = {}
+            addons_info[addon_info.name][addon_info.version] = {
+                "source": source_data,
+                "file_hash": dist_item.file_hash,
+                "distributed_dt": stored_time
+            }
+
+        self.update_addons_metadata(addons_info)
+
+    def get_all_distribution_items(self):
+        """Distribution items required by server.
+
+        Items contain the dependency package item and all addons that are
+        enabled and have distribution requirements.
+
+        Items can be already available on machine.
+
+        Returns:
+            List[DistributionItem]: Distribution items required by server.
+        """
+
+        output = []
+        dependency_dist_item = self.get_dependency_dist_item()
+        if dependency_dist_item is not None:
+            output.append(dependency_dist_item)
+        for dist_item in self.get_addons_dist_items().values():
+            output.append(dist_item)
+        return output
+
+    def distribute(self, threaded=False):
+        """Distribute all missing items.
+
+        Method will try to distribute all items that are required by server.
+
+        This method does not handle failed items. To validate the result call
+        'validate_distribution' when this method finishes.
+
+        Args:
+            threaded (bool): Distribute items in threads.
+        """
+
+        if self._dist_started:
+            raise RuntimeError("Distribution already started")
+        self._dist_started = True
+        threads = collections.deque()
+        for item in self.get_all_distribution_items():
+            if threaded:
+                thread = threading.Thread(target=item.distribute)
+                thread.start()
+                threads.append(thread)
+            else:
+                item.distribute()
+
+        while threads:
+            thread = threads.popleft()
+            if thread.is_alive():
+                threads.append(thread)
+            else:
+                thread.join()
+
+        self.finish_distribution()
+
+    def validate_distribution(self):
+        """Check if all required distribution items are distributed.
+
+        Raises:
+            RuntimeError: Any of items is not available.
+        """
+
+        invalid = []
+        dependency_dist_item = self.get_dependency_dist_item()
+        if (
+            dependency_dist_item is not None
+            and dependency_dist_item.state != UpdateState.UPDATED
+        ):
+            invalid.append("Dependency package")
+
+        for addon_name, dist_item in self.get_addons_dist_items().items():
+            if dist_item.state != UpdateState.UPDATED:
+                invalid.append(addon_name)
+
+        if not invalid:
+            return
+
+        raise RuntimeError("Failed to distribute {}".format(
+            ", ".join([f'"{item}"' for item in invalid])
+        ))
+
+    def get_sys_paths(self):
+        """Get all paths to python packages that should be added to python's
+        search paths.
+
+        These paths lead to addon directories and python dependencies in
+        dependency package.
+
+        Todos:
+            Add dependency package directory to output. ATM the structure of
+            the dependency package is not 100% defined.
+
+        Returns:
+            List[str]: Paths that should be added to 'sys.path' and
+                'PYTHONPATH'.
+        """
+
+        output = []
+        for item in self.get_all_distribution_items():
+            if item.state != UpdateState.UPDATED:
+                continue
+            unzip_dirpath = item.unzip_dirpath
+            if unzip_dirpath and os.path.exists(unzip_dirpath):
+                output.append(unzip_dirpath)
+        return output
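Putting the class together, the expected bootstrap flow is roughly the following sketch (default arguments; the real entry point adds its own logging and UI handling):

```python
import sys

distribution = AyonDistribution()
distribution.distribute(threaded=True)
# Raises RuntimeError when any addon or the dependency package
# could not be distributed.
distribution.validate_distribution()

# Make the distributed content importable in this process.
for path in distribution.get_sys_paths():
    sys.path.insert(0, path)
```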
+
+
+def get_default_download_factory():
+    download_factory = DownloadFactory()
+    download_factory.register_format(UrlType.FILESYSTEM, OSDownloader)
+    download_factory.register_format(UrlType.HTTP, HTTPDownloader)
+    download_factory.register_format(UrlType.SERVER, AyonServerDownloader)
+    return download_factory
+
+
+def cli(*args):
+    raise NotImplementedError
diff --git a/common/ayon_common/distribution/addon_info.py b/common/ayon_common/distribution/addon_info.py
new file mode 100644
index 0000000000..6da6f11ead
--- /dev/null
+++ b/common/ayon_common/distribution/addon_info.py
@@ -0,0 +1,177 @@
+import attr
+from enum import Enum
+
+
+class UrlType(Enum):
+    HTTP = "http"
+    GIT = "git"
+    FILESYSTEM = "filesystem"
+    SERVER = "server"
+
+
+@attr.s
+class MultiPlatformPath(object):
+    windows = attr.ib(default=None)
+    linux = attr.ib(default=None)
+    darwin = attr.ib(default=None)
+
+
+@attr.s
+class SourceInfo(object):
+    type = attr.ib()
+
+
+@attr.s
+class LocalSourceInfo(SourceInfo):
+    path = attr.ib(default=attr.Factory(MultiPlatformPath))
+
+
+@attr.s
+class WebSourceInfo(SourceInfo):
+    url = attr.ib(default=None)
+    headers = attr.ib(default=None)
+    filename = attr.ib(default=None)
+
+
+@attr.s
+class ServerSourceInfo(SourceInfo):
+    filename = attr.ib(default=None)
+    path = attr.ib(default=None)
+
+
+def convert_source(source):
+    """Create source object from data information.
+
+    Args:
+        source (Dict[str, Any]): Information about source.
+
+    Returns:
+        Union[None, SourceInfo]: Object with source information if type is
+            known.
+ """ + + source_type = source.get("type") + if not source_type: + return None + + if source_type == UrlType.FILESYSTEM.value: + return LocalSourceInfo( + type=source_type, + path=source["path"] + ) + + if source_type == UrlType.HTTP.value: + url = source["path"] + return WebSourceInfo( + type=source_type, + url=url, + headers=source.get("headers"), + filename=source.get("filename") + ) + + if source_type == UrlType.SERVER.value: + return ServerSourceInfo( + type=source_type, + filename=source.get("filename"), + path=source.get("path") + ) + + +@attr.s +class VersionData(object): + version_data = attr.ib(default=None) + + +@attr.s +class AddonInfo(object): + """Object matching json payload from Server""" + name = attr.ib() + version = attr.ib() + full_name = attr.ib() + title = attr.ib(default=None) + require_distribution = attr.ib(default=False) + sources = attr.ib(default=attr.Factory(list)) + unknown_sources = attr.ib(default=attr.Factory(list)) + hash = attr.ib(default=None) + description = attr.ib(default=None) + license = attr.ib(default=None) + authors = attr.ib(default=None) + + @classmethod + def from_dict(cls, data): + sources = [] + unknown_sources = [] + + production_version = data.get("productionVersion") + if not production_version: + return None + + # server payload contains info about all versions + # active addon must have 'productionVersion' and matching version info + version_data = data.get("versions", {})[production_version] + source_info = version_data.get("clientSourceInfo") + require_distribution = source_info is not None + for source in (source_info or []): + addon_source = convert_source(source) + if addon_source is not None: + sources.append(addon_source) + else: + unknown_sources.append(source) + print(f"Unknown source {source.get('type')}") + + full_name = "{}_{}".format(data["name"], production_version) + return cls( + name=data.get("name"), + version=production_version, + full_name=full_name, + require_distribution=require_distribution, + sources=sources, + unknown_sources=unknown_sources, + hash=data.get("hash"), + description=data.get("description"), + title=data.get("title"), + license=data.get("license"), + authors=data.get("authors") + ) + + +@attr.s +class DependencyItem(object): + """Object matching payload from Server about single dependency package""" + name = attr.ib() + platform = attr.ib() + checksum = attr.ib() + require_distribution = attr.ib() + sources = attr.ib(default=attr.Factory(list)) + unknown_sources = attr.ib(default=attr.Factory(list)) + addon_list = attr.ib(default=attr.Factory(list)) + python_modules = attr.ib(default=attr.Factory(dict)) + + @classmethod + def from_dict(cls, package): + sources = [] + unknown_sources = [] + package_sources = package.get("sources") + require_distribution = package_sources is not None + for source in (package_sources or []): + dependency_source = convert_source(source) + if dependency_source is not None: + sources.append(dependency_source) + else: + print(f"Unknown source {source.get('type')}") + unknown_sources.append(source) + + addon_list = [f"{name}_{version}" + for name, version in + package.get("supportedAddons").items()] + + return cls( + name=package.get("name"), + platform=package.get("platform"), + require_distribution=require_distribution, + sources=sources, + unknown_sources=unknown_sources, + checksum=package.get("checksum"), + addon_list=addon_list, + python_modules=package.get("pythonModules") + ) diff --git a/common/openpype_common/distribution/file_handler.py 
b/common/ayon_common/distribution/file_handler.py similarity index 86% rename from common/openpype_common/distribution/file_handler.py rename to common/ayon_common/distribution/file_handler.py index e649f143e9..a666b014f0 100644 --- a/common/openpype_common/distribution/file_handler.py +++ b/common/ayon_common/distribution/file_handler.py @@ -62,7 +62,7 @@ class RemoteFileHandler: return True if not hash_type: raise ValueError("Provide hash type, md5 or sha256") - if hash_type == 'md5': + if hash_type == "md5": return RemoteFileHandler.check_md5(fpath, hash_value) if hash_type == "sha256": return RemoteFileHandler.check_sha256(fpath, hash_value) @@ -70,7 +70,7 @@ class RemoteFileHandler: @staticmethod def download_url( url, root, filename=None, - sha256=None, max_redirect_hops=3 + sha256=None, max_redirect_hops=3, headers=None ): """Download a file from a url and place it in root. Args: @@ -82,6 +82,7 @@ class RemoteFileHandler: If None, do not check max_redirect_hops (int, optional): Maximum number of redirect hops allowed + headers (dict): additional required headers - Authentication etc.. """ root = os.path.expanduser(root) if not filename: @@ -93,12 +94,13 @@ class RemoteFileHandler: # check if file is already present locally if RemoteFileHandler.check_integrity(fpath, sha256, hash_type="sha256"): - print('Using downloaded and verified file: ' + fpath) + print(f"Using downloaded and verified file: {fpath}") return # expand redirect chain if needed url = RemoteFileHandler._get_redirect_url(url, - max_hops=max_redirect_hops) + max_hops=max_redirect_hops, + headers=headers) # check if file is located on Google Drive file_id = RemoteFileHandler._get_google_drive_file_id(url) @@ -108,14 +110,17 @@ class RemoteFileHandler: # download the file try: - print('Downloading ' + url + ' to ' + fpath) - RemoteFileHandler._urlretrieve(url, fpath) + print(f"Downloading {url} to {fpath}") + RemoteFileHandler._urlretrieve(url, fpath, headers=headers) except (urllib.error.URLError, IOError) as e: - if url[:5] == 'https': - url = url.replace('https:', 'http:') - print('Failed download. Trying https -> http instead.' - ' Downloading ' + url + ' to ' + fpath) - RemoteFileHandler._urlretrieve(url, fpath) + if url[:5] == "https": + url = url.replace("https:", "http:") + print(( + "Failed download. Trying https -> http instead." 
+ f" Downloading {url} to {fpath}" + )) + RemoteFileHandler._urlretrieve(url, fpath, + headers=headers) else: raise e @@ -216,11 +221,16 @@ class RemoteFileHandler: tar_file.close() @staticmethod - def _urlretrieve(url, filename, chunk_size): + def _urlretrieve(url, filename, chunk_size=None, headers=None): + final_headers = {"User-Agent": USER_AGENT} + if headers: + final_headers.update(headers) + + chunk_size = chunk_size or 8192 with open(filename, "wb") as fh: with urllib.request.urlopen( urllib.request.Request(url, - headers={"User-Agent": USER_AGENT})) \ + headers=final_headers)) \ as response: for chunk in iter(lambda: response.read(chunk_size), ""): if not chunk: @@ -228,13 +238,15 @@ class RemoteFileHandler: fh.write(chunk) @staticmethod - def _get_redirect_url(url, max_hops): + def _get_redirect_url(url, max_hops, headers=None): initial_url = url - headers = {"Method": "HEAD", "User-Agent": USER_AGENT} - + final_headers = {"Method": "HEAD", "User-Agent": USER_AGENT} + if headers: + final_headers.update(headers) for _ in range(max_hops + 1): with urllib.request.urlopen( - urllib.request.Request(url, headers=headers)) as response: + urllib.request.Request(url, + headers=final_headers)) as response: if response.url == url or response.url is None: return url diff --git a/common/openpype_common/distribution/tests/test_addon_distributtion.py b/common/ayon_common/distribution/tests/test_addon_distributtion.py similarity index 63% rename from common/openpype_common/distribution/tests/test_addon_distributtion.py rename to common/ayon_common/distribution/tests/test_addon_distributtion.py index 765ea0596a..22a347f3eb 100644 --- a/common/openpype_common/distribution/tests/test_addon_distributtion.py +++ b/common/ayon_common/distribution/tests/test_addon_distributtion.py @@ -2,29 +2,29 @@ import pytest import attr import tempfile -from common.openpype_common.distribution.addon_distribution import ( - AddonDownloader, - OSAddonDownloader, - HTTPAddonDownloader, +from common.ayon_common.distribution.addon_distribution import ( + DownloadFactory, + OSDownloader, + HTTPDownloader, AddonInfo, - update_addon_state, + AyonDistribution, UpdateState ) -from common.openpype_common.distribution.addon_info import UrlType +from common.ayon_common.distribution.addon_info import UrlType @pytest.fixture -def addon_downloader(): - addon_downloader = AddonDownloader() - addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader) - addon_downloader.register_format(UrlType.HTTP, HTTPAddonDownloader) +def addon_download_factory(): + addon_downloader = DownloadFactory() + addon_downloader.register_format(UrlType.FILESYSTEM, OSDownloader) + addon_downloader.register_format(UrlType.HTTP, HTTPDownloader) yield addon_downloader @pytest.fixture -def http_downloader(addon_downloader): - yield addon_downloader.get_downloader(UrlType.HTTP.value) +def http_downloader(addon_download_factory): + yield addon_download_factory.get_downloader(UrlType.HTTP.value) @pytest.fixture @@ -55,7 +55,8 @@ def sample_addon_info(): "clientSourceInfo": [ { "type": "http", - "url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing" # noqa + "path": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing", # noqa + "filename": "dummy.zip" }, { "type": "filesystem", @@ -84,19 +85,19 @@ def sample_addon_info(): def test_register(printer): - addon_downloader = AddonDownloader() + download_factory = DownloadFactory() - assert len(addon_downloader._downloaders) == 0, 
"Contains registered" + assert len(download_factory._downloaders) == 0, "Contains registered" - addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader) - assert len(addon_downloader._downloaders) == 1, "Should contain one" + download_factory.register_format(UrlType.FILESYSTEM, OSDownloader) + assert len(download_factory._downloaders) == 1, "Should contain one" -def test_get_downloader(printer, addon_downloader): - assert addon_downloader.get_downloader(UrlType.FILESYSTEM.value), "Should find" # noqa +def test_get_downloader(printer, download_factory): + assert download_factory.get_downloader(UrlType.FILESYSTEM.value), "Should find" # noqa with pytest.raises(ValueError): - addon_downloader.get_downloader("unknown"), "Shouldn't find" + download_factory.get_downloader("unknown"), "Shouldn't find" def test_addon_info(printer, sample_addon_info): @@ -147,21 +148,36 @@ def test_addon_info(printer, sample_addon_info): def test_update_addon_state(printer, sample_addon_info, - temp_folder, addon_downloader): + temp_folder, download_factory): """Tests possible cases of addon update.""" addon_info = AddonInfo.from_dict(sample_addon_info) orig_hash = addon_info.hash + # Cause crash because of invalid hash addon_info.hash = "brokenhash" - result = update_addon_state([addon_info], temp_folder, addon_downloader) - assert result["openpype_slack_1.0.0"] == UpdateState.FAILED.value, \ - "Update should failed because of wrong hash" + distribution = AyonDistribution( + temp_folder, temp_folder, download_factory, [addon_info], None + ) + distribution.distribute() + dist_items = distribution.get_addons_dist_items() + slack_state = dist_items["openpype_slack_1.0.0"].state + assert slack_state == UpdateState.UPDATE_FAILED, ( + "Update should have failed because of wrong hash") + # Fix cache and validate if was updated addon_info.hash = orig_hash - result = update_addon_state([addon_info], temp_folder, addon_downloader) - assert result["openpype_slack_1.0.0"] == UpdateState.UPDATED.value, \ - "Addon should have been updated" + distribution = AyonDistribution( + temp_folder, temp_folder, download_factory, [addon_info], None + ) + distribution.distribute() + dist_items = distribution.get_addons_dist_items() + assert dist_items["openpype_slack_1.0.0"].state == UpdateState.UPDATED, ( + "Addon should have been updated") - result = update_addon_state([addon_info], temp_folder, addon_downloader) - assert result["openpype_slack_1.0.0"] == UpdateState.EXISTS.value, \ - "Addon should already exist" + # Is UPDATED without calling distribute + distribution = AyonDistribution( + temp_folder, temp_folder, download_factory, [addon_info], None + ) + dist_items = distribution.get_addons_dist_items() + assert dist_items["openpype_slack_1.0.0"].state == UpdateState.UPDATED, ( + "Addon should already exist") diff --git a/common/ayon_common/resources/AYON.icns b/common/ayon_common/resources/AYON.icns new file mode 100644 index 0000000000..2ec66cf3e0 Binary files /dev/null and b/common/ayon_common/resources/AYON.icns differ diff --git a/common/ayon_common/resources/AYON.ico b/common/ayon_common/resources/AYON.ico new file mode 100644 index 0000000000..e0ec3292f8 Binary files /dev/null and b/common/ayon_common/resources/AYON.ico differ diff --git a/common/ayon_common/resources/AYON.png b/common/ayon_common/resources/AYON.png new file mode 100644 index 0000000000..ed13aeea52 Binary files /dev/null and b/common/ayon_common/resources/AYON.png differ diff --git a/common/ayon_common/resources/__init__.py 
b/common/ayon_common/resources/__init__.py new file mode 100644 index 0000000000..ca61295b94 --- /dev/null +++ b/common/ayon_common/resources/__init__.py @@ -0,0 +1,21 @@ +import os + +RESOURCES_DIR = os.path.dirname(os.path.abspath(__file__)) + + +def get_resource_path(*args): + path_items = list(args) + path_items.insert(0, RESOURCES_DIR) + return os.path.sep.join(path_items) + + +def get_icon_path(): + return get_resource_path("AYON.png") + + +def load_stylesheet(): + stylesheet_path = get_resource_path("stylesheet.css") + + with open(stylesheet_path, "r") as stream: + content = stream.read() + return content diff --git a/common/ayon_common/resources/edit.png b/common/ayon_common/resources/edit.png new file mode 100644 index 0000000000..a5a07998a6 Binary files /dev/null and b/common/ayon_common/resources/edit.png differ diff --git a/common/ayon_common/resources/eye.png b/common/ayon_common/resources/eye.png new file mode 100644 index 0000000000..5a683e2974 Binary files /dev/null and b/common/ayon_common/resources/eye.png differ diff --git a/common/ayon_common/resources/stylesheet.css b/common/ayon_common/resources/stylesheet.css new file mode 100644 index 0000000000..732f44f6d1 --- /dev/null +++ b/common/ayon_common/resources/stylesheet.css @@ -0,0 +1,84 @@ +* { + font-size: 10pt; + font-family: "Noto Sans"; + font-weight: 450; + outline: none; +} + +QWidget { + color: #D3D8DE; + background: #2C313A; + border-radius: 0px; +} + +QWidget:disabled { + color: #5b6779; +} + +QLabel { + background: transparent; +} + +QPushButton { + text-align:center center; + border: 0px solid transparent; + border-radius: 0.2em; + padding: 3px 5px 3px 5px; + background: #434a56; +} + +QPushButton:hover { + background: rgba(168, 175, 189, 0.3); + color: #F0F2F5; +} + +QPushButton:pressed {} + +QPushButton:disabled { + background: #434a56; +} + +QLineEdit { + border: 1px solid #373D48; + border-radius: 0.3em; + background: #21252B; + padding: 0.1em; +} + +QLineEdit:disabled { + background: #2C313A; +} +QLineEdit:hover { + border-color: rgba(168, 175, 189, .3); +} +QLineEdit:focus { + border-color: rgb(92, 173, 214); +} + +QLineEdit[state="invalid"] { + border-color: #AA5050; +} + +#Separator { + background: rgba(75, 83, 98, 127); +} + +#PasswordBtn { + border: none; + padding: 0.1em; + background: transparent; +} + +#PasswordBtn:hover { + background: #434a56; +} + +#LikeDisabledInput { + background: #2C313A; +} +#LikeDisabledInput:hover { + border-color: #373D48; +} +#LikeDisabledInput:focus { + border-color: #373D48; +} \ No newline at end of file diff --git a/common/ayon_common/utils.py b/common/ayon_common/utils.py new file mode 100644 index 0000000000..bbf7f01607 --- /dev/null +++ b/common/ayon_common/utils.py @@ -0,0 +1,52 @@ +import os +import appdirs + + +def get_ayon_appdirs(*args): + """Local app data directory of AYON client. + + Args: + *args (Iterable[str]): Subdirectories/files in local app data dir. + + Returns: + str: Path to directory/file in local app data dir. + """ + + return os.path.join( + appdirs.user_data_dir("ayon", "ynput"), + *args + ) + + +def _create_local_site_id(): + """Create a local site identifier.""" + from coolname import generate_slug + + new_id = generate_slug(3) + + print("Created local site id \"{}\"".format(new_id)) + + return new_id + + +def get_local_site_id(): + """Get local site identifier. + + Site id is created if does not exist yet. 
+ """ + + # used for background syncing + site_id = os.environ.get("AYON_SITE_ID") + if site_id: + return site_id + + site_id_path = get_ayon_appdirs("site_id") + if os.path.exists(site_id_path): + with open(site_id_path, "r") as stream: + site_id = stream.read() + + if not site_id: + site_id = _create_local_site_id() + with open(site_id_path, "w") as stream: + stream.write(site_id) + return site_id diff --git a/common/openpype_common/distribution/addon_distribution.py b/common/openpype_common/distribution/addon_distribution.py deleted file mode 100644 index 5e48639dec..0000000000 --- a/common/openpype_common/distribution/addon_distribution.py +++ /dev/null @@ -1,208 +0,0 @@ -import os -from enum import Enum -from abc import abstractmethod -import attr -import logging -import requests -import platform -import shutil - -from .file_handler import RemoteFileHandler -from .addon_info import AddonInfo - - -class UpdateState(Enum): - EXISTS = "exists" - UPDATED = "updated" - FAILED = "failed" - - -class AddonDownloader: - log = logging.getLogger(__name__) - - def __init__(self): - self._downloaders = {} - - def register_format(self, downloader_type, downloader): - self._downloaders[downloader_type.value] = downloader - - def get_downloader(self, downloader_type): - downloader = self._downloaders.get(downloader_type) - if not downloader: - raise ValueError(f"{downloader_type} not implemented") - return downloader() - - @classmethod - @abstractmethod - def download(cls, source, destination): - """Returns url to downloaded addon zip file. - - Args: - source (dict): {type:"http", "url":"https://} ...} - destination (str): local folder to unzip - Returns: - (str) local path to addon zip file - """ - pass - - @classmethod - def check_hash(cls, addon_path, addon_hash): - """Compares 'hash' of downloaded 'addon_url' file. - - Args: - addon_path (str): local path to addon zip file - addon_hash (str): sha256 hash of zip file - Raises: - ValueError if hashes doesn't match - """ - if not os.path.exists(addon_path): - raise ValueError(f"{addon_path} doesn't exist.") - if not RemoteFileHandler.check_integrity(addon_path, - addon_hash, - hash_type="sha256"): - raise ValueError(f"{addon_path} doesn't match expected hash.") - - @classmethod - def unzip(cls, addon_zip_path, destination): - """Unzips local 'addon_zip_path' to 'destination'. 
- - Args: - addon_zip_path (str): local path to addon zip file - destination (str): local folder to unzip - """ - RemoteFileHandler.unzip(addon_zip_path, destination) - os.remove(addon_zip_path) - - @classmethod - def remove(cls, addon_url): - pass - - -class OSAddonDownloader(AddonDownloader): - - @classmethod - def download(cls, source, destination): - # OS doesnt need to download, unzip directly - addon_url = source["path"].get(platform.system().lower()) - if not os.path.exists(addon_url): - raise ValueError("{} is not accessible".format(addon_url)) - return addon_url - - -class HTTPAddonDownloader(AddonDownloader): - CHUNK_SIZE = 100000 - - @classmethod - def download(cls, source, destination): - source_url = source["url"] - cls.log.debug(f"Downloading {source_url} to {destination}") - file_name = os.path.basename(destination) - _, ext = os.path.splitext(file_name) - if (ext.replace(".", '') not - in set(RemoteFileHandler.IMPLEMENTED_ZIP_FORMATS)): - file_name += ".zip" - RemoteFileHandler.download_url(source_url, - destination, - filename=file_name) - - return os.path.join(destination, file_name) - - -def get_addons_info(server_endpoint): - """Returns list of addon information from Server""" - # TODO temp - # addon_info = AddonInfo( - # **{"name": "openpype_slack", - # "version": "1.0.0", - # "addon_url": "c:/projects/openpype_slack_1.0.0.zip", - # "type": UrlType.FILESYSTEM, - # "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"}) # noqa - # - # http_addon = AddonInfo( - # **{"name": "openpype_slack", - # "version": "1.0.0", - # "addon_url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing", # noqa - # "type": UrlType.HTTP, - # "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"}) # noqa - - response = requests.get(server_endpoint) - if not response.ok: - raise Exception(response.text) - - addons_info = [] - for addon in response.json(): - addons_info.append(AddonInfo(**addon)) - return addons_info - - -def update_addon_state(addon_infos, destination_folder, factory, - log=None): - """Loops through all 'addon_infos', compares local version, unzips. - - Loops through server provided list of dictionaries with information about - available addons. Looks if each addon is already present and deployed. - If isn't, addon zip gets downloaded and unzipped into 'destination_folder'. - Args: - addon_infos (list of AddonInfo) - destination_folder (str): local path - factory (AddonDownloader): factory to get appropriate downloader per - addon type - log (logging.Logger) - Returns: - (dict): {"addon_full_name": UpdateState.value - (eg. 
"exists"|"updated"|"failed") - """ - if not log: - log = logging.getLogger(__name__) - - download_states = {} - for addon in addon_infos: - full_name = "{}_{}".format(addon.name, addon.version) - addon_dest = os.path.join(destination_folder, full_name) - - if os.path.isdir(addon_dest): - log.debug(f"Addon version folder {addon_dest} already exists.") - download_states[full_name] = UpdateState.EXISTS.value - continue - - for source in addon.sources: - download_states[full_name] = UpdateState.FAILED.value - try: - downloader = factory.get_downloader(source.type) - zip_file_path = downloader.download(attr.asdict(source), - addon_dest) - downloader.check_hash(zip_file_path, addon.hash) - downloader.unzip(zip_file_path, addon_dest) - download_states[full_name] = UpdateState.UPDATED.value - break - except Exception: - log.warning(f"Error happened during updating {addon.name}", - exc_info=True) - if os.path.isdir(addon_dest): - log.debug(f"Cleaning {addon_dest}") - shutil.rmtree(addon_dest) - - return download_states - - -def check_addons(server_endpoint, addon_folder, downloaders): - """Main entry point to compare existing addons with those on server. - - Args: - server_endpoint (str): url to v4 server endpoint - addon_folder (str): local dir path for addons - downloaders (AddonDownloader): factory of downloaders - - Raises: - (RuntimeError) if any addon failed update - """ - addons_info = get_addons_info(server_endpoint) - result = update_addon_state(addons_info, - addon_folder, - downloaders) - if UpdateState.FAILED.value in result.values(): - raise RuntimeError(f"Unable to update some addons {result}") - - -def cli(*args): - raise NotImplementedError diff --git a/common/openpype_common/distribution/addon_info.py b/common/openpype_common/distribution/addon_info.py deleted file mode 100644 index 00ece11f3b..0000000000 --- a/common/openpype_common/distribution/addon_info.py +++ /dev/null @@ -1,80 +0,0 @@ -import attr -from enum import Enum - - -class UrlType(Enum): - HTTP = "http" - GIT = "git" - FILESYSTEM = "filesystem" - - -@attr.s -class MultiPlatformPath(object): - windows = attr.ib(default=None) - linux = attr.ib(default=None) - darwin = attr.ib(default=None) - - -@attr.s -class AddonSource(object): - type = attr.ib() - - -@attr.s -class LocalAddonSource(AddonSource): - path = attr.ib(default=attr.Factory(MultiPlatformPath)) - - -@attr.s -class WebAddonSource(AddonSource): - url = attr.ib(default=None) - - -@attr.s -class VersionData(object): - version_data = attr.ib(default=None) - - -@attr.s -class AddonInfo(object): - """Object matching json payload from Server""" - name = attr.ib() - version = attr.ib() - title = attr.ib(default=None) - sources = attr.ib(default=attr.Factory(dict)) - hash = attr.ib(default=None) - description = attr.ib(default=None) - license = attr.ib(default=None) - authors = attr.ib(default=None) - - @classmethod - def from_dict(cls, data): - sources = [] - - production_version = data.get("productionVersion") - if not production_version: - return - - # server payload contains info about all versions - # active addon must have 'productionVersion' and matching version info - version_data = data.get("versions", {})[production_version] - - for source in version_data.get("clientSourceInfo", []): - if source.get("type") == UrlType.FILESYSTEM.value: - source_addon = LocalAddonSource(type=source["type"], - path=source["path"]) - if source.get("type") == UrlType.HTTP.value: - source_addon = WebAddonSource(type=source["type"], - url=source["url"]) - - 
sources.append(source_addon) - - return cls(name=data.get("name"), - version=production_version, - sources=sources, - hash=data.get("hash"), - description=data.get("description"), - title=data.get("title"), - license=data.get("license"), - authors=data.get("authors")) - diff --git a/openpype/__init__.py b/openpype/__init__.py index 810664707a..e6b77b1853 100644 --- a/openpype/__init__.py +++ b/openpype/__init__.py @@ -3,3 +3,5 @@ import os PACKAGE_DIR = os.path.dirname(os.path.abspath(__file__)) PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins") + +AYON_SERVER_ENABLED = os.environ.get("USE_AYON_SERVER") == "1" diff --git a/openpype/client/entities.py b/openpype/client/entities.py index adbdd7a47c..5d9654c611 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -1,1553 +1,6 @@ -"""Unclear if these will have public functions like these. +from openpype import AYON_SERVER_ENABLED -Goal is that most of functions here are called on (or with) an object -that has project name as a context (e.g. on 'ProjectEntity'?). - -+ We will need more specific functions doing very specific queries really fast. -""" - -import re -import collections - -import six -from bson.objectid import ObjectId - -from .mongo import get_project_database, get_project_connection - -PatternType = type(re.compile("")) - - -def _prepare_fields(fields, required_fields=None): - if not fields: - return None - - output = { - field: True - for field in fields - } - if "_id" not in output: - output["_id"] = True - - if required_fields: - for key in required_fields: - output[key] = True - return output - - -def convert_id(in_id): - """Helper function for conversion of id from string to ObjectId. - - Args: - in_id (Union[str, ObjectId, Any]): Entity id that should be converted - to right type for queries. - - Returns: - Union[ObjectId, Any]: Converted ids to ObjectId or in type. - """ - - if isinstance(in_id, six.string_types): - return ObjectId(in_id) - return in_id - - -def convert_ids(in_ids): - """Helper function for conversion of ids from string to ObjectId. - - Args: - in_ids (Iterable[Union[str, ObjectId, Any]]): List of entity ids that - should be converted to right type for queries. - - Returns: - List[ObjectId]: Converted ids to ObjectId. - """ - - _output = set() - for in_id in in_ids: - if in_id is not None: - _output.add(convert_id(in_id)) - return list(_output) - - -def get_projects(active=True, inactive=False, fields=None): - """Yield all project entity documents. - - Args: - active (Optional[bool]): Include active projects. Defaults to True. - inactive (Optional[bool]): Include inactive projects. - Defaults to False. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Yields: - dict: Project entity data which can be reduced to specified 'fields'. - None is returned if project with specified filters was not found. - """ - mongodb = get_project_database() - for project_name in mongodb.collection_names(): - if project_name in ("system.indexes",): - continue - project_doc = get_project( - project_name, active=active, inactive=inactive, fields=fields - ) - if project_doc is not None: - yield project_doc - - -def get_project(project_name, active=True, inactive=True, fields=None): - """Return project entity document by project name. - - Args: - project_name (str): Name of project. - active (Optional[bool]): Allow active project. Defaults to True. - inactive (Optional[bool]): Allow inactive project. Defaults to True. 
- fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[Dict, None]: Project entity data which can be reduced to - specified 'fields'. None is returned if project with specified - filters was not found. - """ - # Skip if both are disabled - if not active and not inactive: - return None - - query_filter = {"type": "project"} - # Keep query untouched if both should be available - if active and inactive: - pass - - # Add filter to keep only active - elif active: - query_filter["$or"] = [ - {"data.active": {"$exists": False}}, - {"data.active": True}, - ] - - # Add filter to keep only inactive - elif inactive: - query_filter["$or"] = [ - {"data.active": {"$exists": False}}, - {"data.active": False}, - ] - - conn = get_project_connection(project_name) - return conn.find_one(query_filter, _prepare_fields(fields)) - - -def get_whole_project(project_name): - """Receive all documents from project. - - Helper that can be used to get all document from whole project. For example - for backups etc. - - Returns: - Cursor: Query cursor as iterable which returns all documents from - project collection. - """ - - conn = get_project_connection(project_name) - return conn.find({}) - - -def get_asset_by_id(project_name, asset_id, fields=None): - """Receive asset data by its id. - - Args: - project_name (str): Name of project where to look for queried entities. - asset_id (Union[str, ObjectId]): Asset's id. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[Dict, None]: Asset entity data which can be reduced to - specified 'fields'. None is returned if asset with specified - filters was not found. - """ - - asset_id = convert_id(asset_id) - if not asset_id: - return None - - query_filter = {"type": "asset", "_id": asset_id} - conn = get_project_connection(project_name) - return conn.find_one(query_filter, _prepare_fields(fields)) - - -def get_asset_by_name(project_name, asset_name, fields=None): - """Receive asset data by its name. - - Args: - project_name (str): Name of project where to look for queried entities. - asset_name (str): Asset's name. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[Dict, None]: Asset entity data which can be reduced to - specified 'fields'. None is returned if asset with specified - filters was not found. - """ - - if not asset_name: - return None - - query_filter = {"type": "asset", "name": asset_name} - conn = get_project_connection(project_name) - return conn.find_one(query_filter, _prepare_fields(fields)) - - -# NOTE this could be just public function? -# - any better variable name instead of 'standard'? -# - same approach can be used for rest of types -def _get_assets( - project_name, - asset_ids=None, - asset_names=None, - parent_ids=None, - standard=True, - archived=False, - fields=None -): - """Assets for specified project by passed filters. - - Passed filters (ids and names) are always combined so all conditions must - match. - - To receive all assets from project just keep filters empty. - - Args: - project_name (str): Name of project where to look for queried entities. - asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should - be found. - asset_names (Iterable[str]): Name assets that should be found. - parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids. 
- standard (bool): Query standard assets (type 'asset'). - archived (bool): Query archived assets (type 'archived_asset'). - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Cursor: Query cursor as iterable which returns asset documents matching - passed filters. - """ - - asset_types = [] - if standard: - asset_types.append("asset") - if archived: - asset_types.append("archived_asset") - - if not asset_types: - return [] - - if len(asset_types) == 1: - query_filter = {"type": asset_types[0]} - else: - query_filter = {"type": {"$in": asset_types}} - - if asset_ids is not None: - asset_ids = convert_ids(asset_ids) - if not asset_ids: - return [] - query_filter["_id"] = {"$in": asset_ids} - - if asset_names is not None: - if not asset_names: - return [] - query_filter["name"] = {"$in": list(asset_names)} - - if parent_ids is not None: - parent_ids = convert_ids(parent_ids) - if not parent_ids: - return [] - query_filter["data.visualParent"] = {"$in": parent_ids} - - conn = get_project_connection(project_name) - - return conn.find(query_filter, _prepare_fields(fields)) - - -def get_assets( - project_name, - asset_ids=None, - asset_names=None, - parent_ids=None, - archived=False, - fields=None -): - """Assets for specified project by passed filters. - - Passed filters (ids and names) are always combined so all conditions must - match. - - To receive all assets from project just keep filters empty. - - Args: - project_name (str): Name of project where to look for queried entities. - asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should - be found. - asset_names (Iterable[str]): Name assets that should be found. - parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids. - archived (bool): Add also archived assets. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Cursor: Query cursor as iterable which returns asset documents matching - passed filters. - """ - - return _get_assets( - project_name, - asset_ids, - asset_names, - parent_ids, - True, - archived, - fields - ) - - -def get_archived_assets( - project_name, - asset_ids=None, - asset_names=None, - parent_ids=None, - fields=None -): - """Archived assets for specified project by passed filters. - - Passed filters (ids and names) are always combined so all conditions must - match. - - To receive all archived assets from project just keep filters empty. - - Args: - project_name (str): Name of project where to look for queried entities. - asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should - be found. - asset_names (Iterable[str]): Name assets that should be found. - parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Cursor: Query cursor as iterable which returns asset documents matching - passed filters. - """ - - return _get_assets( - project_name, asset_ids, asset_names, parent_ids, False, True, fields - ) - - -def get_asset_ids_with_subsets(project_name, asset_ids=None): - """Find out which assets have existing subsets. - - Args: - project_name (str): Name of project where to look for queried entities. - asset_ids (Iterable[Union[str, ObjectId]]): Look only for entered - asset ids. - - Returns: - Iterable[ObjectId]: Asset ids that have existing subsets. 
- """ - - subset_query = { - "type": "subset" - } - if asset_ids is not None: - asset_ids = convert_ids(asset_ids) - if not asset_ids: - return [] - subset_query["parent"] = {"$in": asset_ids} - - conn = get_project_connection(project_name) - result = conn.aggregate([ - { - "$match": subset_query - }, - { - "$group": { - "_id": "$parent", - "count": {"$sum": 1} - } - } - ]) - asset_ids_with_subsets = [] - for item in result: - asset_id = item["_id"] - count = item["count"] - if count > 0: - asset_ids_with_subsets.append(asset_id) - return asset_ids_with_subsets - - -def get_subset_by_id(project_name, subset_id, fields=None): - """Single subset entity data by its id. - - Args: - project_name (str): Name of project where to look for queried entities. - subset_id (Union[str, ObjectId]): Id of subset which should be found. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[Dict, None]: Subset entity data which can be reduced to - specified 'fields'. None is returned if subset with specified - filters was not found. - """ - - subset_id = convert_id(subset_id) - if not subset_id: - return None - - query_filters = {"type": "subset", "_id": subset_id} - conn = get_project_connection(project_name) - return conn.find_one(query_filters, _prepare_fields(fields)) - - -def get_subset_by_name(project_name, subset_name, asset_id, fields=None): - """Single subset entity data by its name and its version id. - - Args: - project_name (str): Name of project where to look for queried entities. - subset_name (str): Name of subset. - asset_id (Union[str, ObjectId]): Id of parent asset. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[Dict, None]: Subset entity data which can be reduced to - specified 'fields'. None is returned if subset with specified - filters was not found. - """ - if not subset_name: - return None - - asset_id = convert_id(asset_id) - if not asset_id: - return None - - query_filters = { - "type": "subset", - "name": subset_name, - "parent": asset_id - } - conn = get_project_connection(project_name) - return conn.find_one(query_filters, _prepare_fields(fields)) - - -def get_subsets( - project_name, - subset_ids=None, - subset_names=None, - asset_ids=None, - names_by_asset_ids=None, - archived=False, - fields=None -): - """Subset entities data from one project filtered by entered filters. - - Filters are additive (all conditions must pass to return subset). - - Args: - project_name (str): Name of project where to look for queried entities. - subset_ids (Iterable[Union[str, ObjectId]]): Subset ids that should be - queried. Filter ignored if 'None' is passed. - subset_names (Iterable[str]): Subset names that should be queried. - Filter ignored if 'None' is passed. - asset_ids (Iterable[Union[str, ObjectId]]): Asset ids under which - should look for the subsets. Filter ignored if 'None' is passed. - names_by_asset_ids (dict[ObjectId, List[str]]): Complex filtering - using asset ids and list of subset names under the asset. - archived (bool): Look for archived subsets too. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Cursor: Iterable cursor yielding all matching subsets. 
- """ - - subset_types = ["subset"] - if archived: - subset_types.append("archived_subset") - - if len(subset_types) == 1: - query_filter = {"type": subset_types[0]} - else: - query_filter = {"type": {"$in": subset_types}} - - if asset_ids is not None: - asset_ids = convert_ids(asset_ids) - if not asset_ids: - return [] - query_filter["parent"] = {"$in": asset_ids} - - if subset_ids is not None: - subset_ids = convert_ids(subset_ids) - if not subset_ids: - return [] - query_filter["_id"] = {"$in": subset_ids} - - if subset_names is not None: - if not subset_names: - return [] - query_filter["name"] = {"$in": list(subset_names)} - - if names_by_asset_ids is not None: - or_query = [] - for asset_id, names in names_by_asset_ids.items(): - if asset_id and names: - or_query.append({ - "parent": convert_id(asset_id), - "name": {"$in": list(names)} - }) - if not or_query: - return [] - query_filter["$or"] = or_query - - conn = get_project_connection(project_name) - return conn.find(query_filter, _prepare_fields(fields)) - - -def get_subset_families(project_name, subset_ids=None): - """Set of main families of subsets. - - Args: - project_name (str): Name of project where to look for queried entities. - subset_ids (Iterable[Union[str, ObjectId]]): Subset ids that should - be queried. All subsets from project are used if 'None' is passed. - - Returns: - set[str]: Main families of matching subsets. - """ - - subset_filter = { - "type": "subset" - } - if subset_ids is not None: - if not subset_ids: - return set() - subset_filter["_id"] = {"$in": list(subset_ids)} - - conn = get_project_connection(project_name) - result = list(conn.aggregate([ - {"$match": subset_filter}, - {"$project": { - "family": {"$arrayElemAt": ["$data.families", 0]} - }}, - {"$group": { - "_id": "family_group", - "families": {"$addToSet": "$family"} - }} - ])) - if result: - return set(result[0]["families"]) - return set() - - -def get_version_by_id(project_name, version_id, fields=None): - """Single version entity data by its id. - - Args: - project_name (str): Name of project where to look for queried entities. - version_id (Union[str, ObjectId]): Id of version which should be found. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[Dict, None]: Version entity data which can be reduced to - specified 'fields'. None is returned if version with specified - filters was not found. - """ - - version_id = convert_id(version_id) - if not version_id: - return None - - query_filter = { - "type": {"$in": ["version", "hero_version"]}, - "_id": version_id - } - conn = get_project_connection(project_name) - return conn.find_one(query_filter, _prepare_fields(fields)) - - -def get_version_by_name(project_name, version, subset_id, fields=None): - """Single version entity data by its name and subset id. - - Args: - project_name (str): Name of project where to look for queried entities. - version (int): name of version entity (its version). - subset_id (Union[str, ObjectId]): Id of version which should be found. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[Dict, None]: Version entity data which can be reduced to - specified 'fields'. None is returned if version with specified - filters was not found. 
- """ - - subset_id = convert_id(subset_id) - if not subset_id: - return None - - conn = get_project_connection(project_name) - query_filter = { - "type": "version", - "parent": subset_id, - "name": version - } - return conn.find_one(query_filter, _prepare_fields(fields)) - - -def version_is_latest(project_name, version_id): - """Is version the latest from its subset. - - Note: - Hero versions are considered as latest. - - Todo: - Maybe raise exception when version was not found? - - Args: - project_name (str):Name of project where to look for queried entities. - version_id (Union[str, ObjectId]): Version id which is checked. - - Returns: - bool: True if is latest version from subset else False. - """ - - version_id = convert_id(version_id) - if not version_id: - return False - version_doc = get_version_by_id( - project_name, version_id, fields=["_id", "type", "parent"] - ) - # What to do when version is not found? - if not version_doc: - return False - - if version_doc["type"] == "hero_version": - return True - - last_version = get_last_version_by_subset_id( - project_name, version_doc["parent"], fields=["_id"] - ) - return last_version["_id"] == version_id - - -def _get_versions( - project_name, - subset_ids=None, - version_ids=None, - versions=None, - standard=True, - hero=False, - fields=None -): - version_types = [] - if standard: - version_types.append("version") - - if hero: - version_types.append("hero_version") - - if not version_types: - return [] - elif len(version_types) == 1: - query_filter = {"type": version_types[0]} - else: - query_filter = {"type": {"$in": version_types}} - - if subset_ids is not None: - subset_ids = convert_ids(subset_ids) - if not subset_ids: - return [] - query_filter["parent"] = {"$in": subset_ids} - - if version_ids is not None: - version_ids = convert_ids(version_ids) - if not version_ids: - return [] - query_filter["_id"] = {"$in": version_ids} - - if versions is not None: - versions = list(versions) - if not versions: - return [] - - if len(versions) == 1: - query_filter["name"] = versions[0] - else: - query_filter["name"] = {"$in": versions} - - conn = get_project_connection(project_name) - - return conn.find(query_filter, _prepare_fields(fields)) - - -def get_versions( - project_name, - version_ids=None, - subset_ids=None, - versions=None, - hero=False, - fields=None -): - """Version entities data from one project filtered by entered filters. - - Filters are additive (all conditions must pass to return subset). - - Args: - project_name (str): Name of project where to look for queried entities. - version_ids (Iterable[Union[str, ObjectId]]): Version ids that will - be queried. Filter ignored if 'None' is passed. - subset_ids (Iterable[str]): Subset ids that will be queried. - Filter ignored if 'None' is passed. - versions (Iterable[int]): Version names (as integers). - Filter ignored if 'None' is passed. - hero (bool): Look also for hero versions. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Cursor: Iterable cursor yielding all matching versions. - """ - - return _get_versions( - project_name, - subset_ids, - version_ids, - versions, - standard=True, - hero=hero, - fields=fields - ) - - -def get_hero_version_by_subset_id(project_name, subset_id, fields=None): - """Hero version by subset id. - - Args: - project_name (str): Name of project where to look for queried entities. - subset_id (Union[str, ObjectId]): Subset id under which - is hero version. 
- fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[Dict, None]: Hero version entity data which can be reduced to - specified 'fields'. None is returned if hero version with specified - filters was not found. - """ - - subset_id = convert_id(subset_id) - if not subset_id: - return None - - versions = list(_get_versions( - project_name, - subset_ids=[subset_id], - standard=False, - hero=True, - fields=fields - )) - if versions: - return versions[0] - return None - - -def get_hero_version_by_id(project_name, version_id, fields=None): - """Hero version by its id. - - Args: - project_name (str): Name of project where to look for queried entities. - version_id (Union[str, ObjectId]): Hero version id. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[Dict, None]: Hero version entity data which can be reduced to - specified 'fields'. None is returned if hero version with specified - filters was not found. - """ - - version_id = convert_id(version_id) - if not version_id: - return None - - versions = list(_get_versions( - project_name, - version_ids=[version_id], - standard=False, - hero=True, - fields=fields - )) - if versions: - return versions[0] - return None - - -def get_hero_versions( - project_name, - subset_ids=None, - version_ids=None, - fields=None -): - """Hero version entities data from one project filtered by entered filters. - - Args: - project_name (str): Name of project where to look for queried entities. - subset_ids (Iterable[Union[str, ObjectId]]): Subset ids for which - should look for hero versions. Filter ignored if 'None' is passed. - version_ids (Iterable[Union[str, ObjectId]]): Hero version ids. Filter - ignored if 'None' is passed. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Cursor|list: Iterable yielding hero versions matching passed filters. - """ - - return _get_versions( - project_name, - subset_ids, - version_ids, - standard=False, - hero=True, - fields=fields - ) - - -def get_output_link_versions(project_name, version_id, fields=None): - """Versions where passed version was used as input. - - Question: - Not 100% sure about the usage of the function so the name and docstring - maybe does not match what it does? - - Args: - project_name (str): Name of project where to look for queried entities. - version_id (Union[str, ObjectId]): Version id which can be used - as input link for other versions. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Iterable: Iterable cursor yielding versions that are used as input - links for passed version. - """ - - version_id = convert_id(version_id) - if not version_id: - return [] - - conn = get_project_connection(project_name) - # Does make sense to look for hero versions? - query_filter = { - "type": "version", - "data.inputLinks.id": version_id - } - return conn.find(query_filter, _prepare_fields(fields)) - - -def get_last_versions(project_name, subset_ids, active=None, fields=None): - """Latest versions for entered subset_ids. - - Args: - project_name (str): Name of project where to look for queried entities. - subset_ids (Iterable[Union[str, ObjectId]]): List of subset ids. - active (Optional[bool]): If True only active versions are returned. 
- fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - dict[ObjectId, int]: Key is subset id and value is last version name. - """ - - subset_ids = convert_ids(subset_ids) - if not subset_ids: - return {} - - if fields is not None: - fields = list(fields) - if not fields: - return {} - - # Avoid double query if only name and _id are requested - name_needed = False - limit_query = False - if fields: - fields_s = set(fields) - if "name" in fields_s: - name_needed = True - fields_s.remove("name") - - for field in ("_id", "parent"): - if field in fields_s: - fields_s.remove(field) - limit_query = len(fields_s) == 0 - - group_item = { - "_id": "$parent", - "_version_id": {"$last": "$_id"} - } - # Add name if name is needed (only for limit query) - if name_needed: - group_item["name"] = {"$last": "$name"} - - aggregate_filter = { - "type": "version", - "parent": {"$in": subset_ids} - } - if active is False: - aggregate_filter["data.active"] = active - elif active is True: - aggregate_filter["$or"] = [ - {"data.active": {"$exists": 0}}, - {"data.active": active}, - ] - - aggregation_pipeline = [ - # Find all versions of those subsets - {"$match": aggregate_filter}, - # Sorting versions all together - {"$sort": {"name": 1}}, - # Group them by "parent", but only take the last - {"$group": group_item} - ] - - conn = get_project_connection(project_name) - aggregate_result = conn.aggregate(aggregation_pipeline) - if limit_query: - output = {} - for item in aggregate_result: - subset_id = item["_id"] - item_data = {"_id": item["_version_id"], "parent": subset_id} - if name_needed: - item_data["name"] = item["name"] - output[subset_id] = item_data - return output - - version_ids = [ - doc["_version_id"] - for doc in aggregate_result - ] - - fields = _prepare_fields(fields, ["parent"]) - - version_docs = get_versions( - project_name, version_ids=version_ids, fields=fields - ) - - return { - version_doc["parent"]: version_doc - for version_doc in version_docs - } - - -def get_last_version_by_subset_id(project_name, subset_id, fields=None): - """Last version for passed subset id. - - Args: - project_name (str): Name of project where to look for queried entities. - subset_id (Union[str, ObjectId]): Id of version which should be found. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[Dict, None]: Version entity data which can be reduced to - specified 'fields'. None is returned if version with specified - filters was not found. - """ - - subset_id = convert_id(subset_id) - if not subset_id: - return None - - last_versions = get_last_versions( - project_name, subset_ids=[subset_id], fields=fields - ) - return last_versions.get(subset_id) - - -def get_last_version_by_subset_name( - project_name, subset_name, asset_id=None, asset_name=None, fields=None -): - """Last version for passed subset name under asset id/name. - - It is required to pass 'asset_id' or 'asset_name'. Asset id is recommended - if is available. - - Args: - project_name (str): Name of project where to look for queried entities. - subset_name (str): Name of subset. - asset_id (Union[str, ObjectId]): Asset id which is parent of passed - subset name. - asset_name (str): Asset name which is parent of passed subset name. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. 
- - Returns: - Union[Dict, None]: Version entity data which can be reduced to - specified 'fields'. None is returned if version with specified - filters was not found. - """ - - if not asset_id and not asset_name: - return None - - if not asset_id: - asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"]) - if not asset_doc: - return None - asset_id = asset_doc["_id"] - subset_doc = get_subset_by_name( - project_name, subset_name, asset_id, fields=["_id"] - ) - if not subset_doc: - return None - return get_last_version_by_subset_id( - project_name, subset_doc["_id"], fields=fields - ) - - -def get_representation_by_id(project_name, representation_id, fields=None): - """Representation entity data by its id. - - Args: - project_name (str): Name of project where to look for queried entities. - representation_id (Union[str, ObjectId]): Representation id. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[Dict, None]: Representation entity data which can be reduced to - specified 'fields'. None is returned if representation with - specified filters was not found. - """ - - if not representation_id: - return None - - repre_types = ["representation", "archived_representation"] - query_filter = { - "type": {"$in": repre_types} - } - if representation_id is not None: - query_filter["_id"] = convert_id(representation_id) - - conn = get_project_connection(project_name) - - return conn.find_one(query_filter, _prepare_fields(fields)) - - -def get_representation_by_name( - project_name, representation_name, version_id, fields=None -): - """Representation entity data by its name and its version id. - - Args: - project_name (str): Name of project where to look for queried entities. - representation_name (str): Representation name. - version_id (Union[str, ObjectId]): Id of parent version entity. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[dict[str, Any], None]: Representation entity data which can be - reduced to specified 'fields'. None is returned if representation - with specified filters was not found. 
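An end-to-end lookup sketch tying the helpers above together: resolve the latest version of a subset by names only, then fetch one of its representations. Project, asset, subset, and representation names are hypothetical:

```python
# Chain: asset name -> subset name -> last version -> representation.
from openpype.client.mongo.entities import (
    get_last_version_by_subset_name,
    get_representation_by_name,
)

project_name = "demo_project"
version = get_last_version_by_subset_name(
    project_name,
    subset_name="modelMain",
    asset_name="characterA",
    fields=["_id", "name"],
)
if version:
    repre = get_representation_by_name(project_name, "abc", version["_id"])
```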
- """ - - version_id = convert_id(version_id) - if not version_id or not representation_name: - return None - repre_types = ["representation", "archived_representations"] - query_filter = { - "type": {"$in": repre_types}, - "name": representation_name, - "parent": version_id - } - - conn = get_project_connection(project_name) - return conn.find_one(query_filter, _prepare_fields(fields)) - - -def _flatten_dict(data): - flatten_queue = collections.deque() - flatten_queue.append(data) - output = {} - while flatten_queue: - item = flatten_queue.popleft() - for key, value in item.items(): - if not isinstance(value, dict): - output[key] = value - continue - - tmp = {} - for subkey, subvalue in value.items(): - new_key = "{}.{}".format(key, subkey) - tmp[new_key] = subvalue - flatten_queue.append(tmp) - return output - - -def _regex_filters(filters): - output = [] - for key, value in filters.items(): - regexes = [] - a_values = [] - if isinstance(value, PatternType): - regexes.append(value) - elif isinstance(value, (list, tuple, set)): - for item in value: - if isinstance(item, PatternType): - regexes.append(item) - else: - a_values.append(item) - else: - a_values.append(value) - - key_filters = [] - if len(a_values) == 1: - key_filters.append({key: a_values[0]}) - elif a_values: - key_filters.append({key: {"$in": a_values}}) - - for regex in regexes: - key_filters.append({key: {"$regex": regex}}) - - if len(key_filters) == 1: - output.append(key_filters[0]) - else: - output.append({"$or": key_filters}) - - return output - - -def _get_representations( - project_name, - representation_ids, - representation_names, - version_ids, - context_filters, - names_by_version_ids, - standard, - archived, - fields -): - default_output = [] - repre_types = [] - if standard: - repre_types.append("representation") - if archived: - repre_types.append("archived_representation") - - if not repre_types: - return default_output - - if len(repre_types) == 1: - query_filter = {"type": repre_types[0]} - else: - query_filter = {"type": {"$in": repre_types}} - - if representation_ids is not None: - representation_ids = convert_ids(representation_ids) - if not representation_ids: - return default_output - query_filter["_id"] = {"$in": representation_ids} - - if representation_names is not None: - if not representation_names: - return default_output - query_filter["name"] = {"$in": list(representation_names)} - - if version_ids is not None: - version_ids = convert_ids(version_ids) - if not version_ids: - return default_output - query_filter["parent"] = {"$in": version_ids} - - or_queries = [] - if names_by_version_ids is not None: - or_query = [] - for version_id, names in names_by_version_ids.items(): - if version_id and names: - or_query.append({ - "parent": convert_id(version_id), - "name": {"$in": list(names)} - }) - if not or_query: - return default_output - or_queries.append(or_query) - - if context_filters is not None: - if not context_filters: - return [] - _flatten_filters = _flatten_dict(context_filters) - flatten_filters = {} - for key, value in _flatten_filters.items(): - if not key.startswith("context"): - key = "context.{}".format(key) - flatten_filters[key] = value - - for item in _regex_filters(flatten_filters): - for key, value in item.items(): - if key != "$or": - query_filter[key] = value - - elif value: - or_queries.append(value) - - if len(or_queries) == 1: - query_filter["$or"] = or_queries[0] - elif or_queries: - and_query = [] - for or_query in or_queries: - if isinstance(or_query, list): - or_query 
= {"$or": or_query} - and_query.append(or_query) - query_filter["$and"] = and_query - - conn = get_project_connection(project_name) - - return conn.find(query_filter, _prepare_fields(fields)) - - -def get_representations( - project_name, - representation_ids=None, - representation_names=None, - version_ids=None, - context_filters=None, - names_by_version_ids=None, - archived=False, - standard=True, - fields=None -): - """Representation entities data from one project filtered by filters. - - Filters are additive (all conditions must pass to return subset). - - Args: - project_name (str): Name of project where to look for queried entities. - representation_ids (Iterable[Union[str, ObjectId]]): Representation ids - used as filter. Filter ignored if 'None' is passed. - representation_names (Iterable[str]): Representations names used - as filter. Filter ignored if 'None' is passed. - version_ids (Iterable[str]): Subset ids used as parent filter. Filter - ignored if 'None' is passed. - context_filters (Dict[str, List[str, PatternType]]): Filter by - representation context fields. - names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering - using version ids and list of names under the version. - archived (bool): Output will also contain archived representations. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Cursor: Iterable cursor yielding all matching representations. - """ - - return _get_representations( - project_name=project_name, - representation_ids=representation_ids, - representation_names=representation_names, - version_ids=version_ids, - context_filters=context_filters, - names_by_version_ids=names_by_version_ids, - standard=standard, - archived=archived, - fields=fields - ) - - -def get_archived_representations( - project_name, - representation_ids=None, - representation_names=None, - version_ids=None, - context_filters=None, - names_by_version_ids=None, - fields=None -): - """Archived representation entities data from project with applied filters. - - Filters are additive (all conditions must pass to return subset). - - Args: - project_name (str): Name of project where to look for queried entities. - representation_ids (Iterable[Union[str, ObjectId]]): Representation ids - used as filter. Filter ignored if 'None' is passed. - representation_names (Iterable[str]): Representations names used - as filter. Filter ignored if 'None' is passed. - version_ids (Iterable[str]): Subset ids used as parent filter. Filter - ignored if 'None' is passed. - context_filters (Dict[str, List[str, PatternType]]): Filter by - representation context fields. - names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering - using version ids and list of names under the version. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Cursor: Iterable cursor yielding all matching representations. - """ - - return _get_representations( - project_name=project_name, - representation_ids=representation_ids, - representation_names=representation_names, - version_ids=version_ids, - context_filters=context_filters, - names_by_version_ids=names_by_version_ids, - standard=False, - archived=True, - fields=fields - ) - - -def get_representations_parents(project_name, representations): - """Prepare parents of representation entities. - - Each item of returned dictionary contains version, subset, asset - and project in that order. 
- - Args: - project_name (str): Name of project where to look for queried entities. - representations (List[dict]): Representation entities with at least - '_id' and 'parent' keys. - - Returns: - dict[ObjectId, tuple]: Parents by representation id. - """ - - repre_docs_by_version_id = collections.defaultdict(list) - version_docs_by_version_id = {} - version_docs_by_subset_id = collections.defaultdict(list) - subset_docs_by_subset_id = {} - subset_docs_by_asset_id = collections.defaultdict(list) - output = {} - for repre_doc in representations: - repre_id = repre_doc["_id"] - version_id = repre_doc["parent"] - output[repre_id] = (None, None, None, None) - repre_docs_by_version_id[version_id].append(repre_doc) - - version_docs = get_versions( - project_name, - version_ids=repre_docs_by_version_id.keys(), - hero=True - ) - for version_doc in version_docs: - version_id = version_doc["_id"] - subset_id = version_doc["parent"] - version_docs_by_version_id[version_id] = version_doc - version_docs_by_subset_id[subset_id].append(version_doc) - - subset_docs = get_subsets( - project_name, subset_ids=version_docs_by_subset_id.keys() - ) - for subset_doc in subset_docs: - subset_id = subset_doc["_id"] - asset_id = subset_doc["parent"] - subset_docs_by_subset_id[subset_id] = subset_doc - subset_docs_by_asset_id[asset_id].append(subset_doc) - - asset_docs = get_assets( - project_name, asset_ids=subset_docs_by_asset_id.keys() - ) - asset_docs_by_id = { - asset_doc["_id"]: asset_doc - for asset_doc in asset_docs - } - - project_doc = get_project(project_name) - - for version_id, repre_docs in repre_docs_by_version_id.items(): - asset_doc = None - subset_doc = None - version_doc = version_docs_by_version_id.get(version_id) - if version_doc: - subset_id = version_doc["parent"] - subset_doc = subset_docs_by_subset_id.get(subset_id) - if subset_doc: - asset_id = subset_doc["parent"] - asset_doc = asset_docs_by_id.get(asset_id) - - for repre_doc in repre_docs: - repre_id = repre_doc["_id"] - output[repre_id] = ( - version_doc, subset_doc, asset_doc, project_doc - ) - return output - - -def get_representation_parents(project_name, representation): - """Prepare parents of representation entity. - - Each item of returned dictionary contains version, subset, asset - and project in that order. - - Args: - project_name (str): Name of project where to look for queried entities. - representation (dict): Representation entities with at least - '_id' and 'parent' keys. - - Returns: - dict[ObjectId, tuple]: Parents by representation id. - """ - - if not representation: - return None - - repre_id = representation["_id"] - parents_by_repre_id = get_representations_parents( - project_name, [representation] - ) - return parents_by_repre_id[repre_id] - - -def get_thumbnail_id_from_source(project_name, src_type, src_id): - """Receive thumbnail id from source entity. - - Args: - project_name (str): Name of project where to look for queried entities. - src_type (str): Type of source entity ('asset', 'version'). - src_id (Union[str, ObjectId]): Id of source entity. - - Returns: - Union[ObjectId, None]: Thumbnail id assigned to entity. If Source - entity does not have any thumbnail id assigned. 
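The parents helpers defined above return the whole entity chain in one call, so callers can unpack it in the documented order. A hedged sketch, assuming the representation id is a real entity in the project (the id below is hypothetical):

```python
# Resolve the full parent chain of one representation.
from openpype.client.mongo.entities import (
    get_representation_by_id,
    get_representation_parents,
)

project_name = "demo_project"
repre_doc = get_representation_by_id(
    project_name, "563ea0b0d50d71b22f1854d2"
)
if repre_doc:
    version_doc, subset_doc, asset_doc, project_doc = (
        get_representation_parents(project_name, repre_doc)
    )
```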
- """ - - if not src_type or not src_id: - return None - - query_filter = {"_id": convert_id(src_id)} - - conn = get_project_connection(project_name) - src_doc = conn.find_one(query_filter, {"data.thumbnail_id"}) - if src_doc: - return src_doc.get("data", {}).get("thumbnail_id") - return None - - -def get_thumbnails(project_name, thumbnail_ids, fields=None): - """Receive thumbnails entity data. - - Thumbnail entity can be used to receive binary content of thumbnail based - on its content and ThumbnailResolvers. - - Args: - project_name (str): Name of project where to look for queried entities. - thumbnail_ids (Iterable[Union[str, ObjectId]]): Ids of thumbnail - entities. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - cursor: Cursor of queried documents. - """ - - if thumbnail_ids: - thumbnail_ids = convert_ids(thumbnail_ids) - - if not thumbnail_ids: - return [] - query_filter = { - "type": "thumbnail", - "_id": {"$in": thumbnail_ids} - } - conn = get_project_connection(project_name) - return conn.find(query_filter, _prepare_fields(fields)) - - -def get_thumbnail(project_name, thumbnail_id, fields=None): - """Receive thumbnail entity data. - - Args: - project_name (str): Name of project where to look for queried entities. - thumbnail_id (Union[str, ObjectId]): Id of thumbnail entity. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[Dict, None]: Thumbnail entity data which can be reduced to - specified 'fields'.None is returned if thumbnail with specified - filters was not found. - """ - - if not thumbnail_id: - return None - query_filter = {"type": "thumbnail", "_id": convert_id(thumbnail_id)} - conn = get_project_connection(project_name) - return conn.find_one(query_filter, _prepare_fields(fields)) - - -def get_workfile_info( - project_name, asset_id, task_name, filename, fields=None -): - """Document with workfile information. - - Warning: - Query is based on filename and context which does not meant it will - find always right and expected result. Information have limited usage - and is not recommended to use it as source information about workfile. - - Args: - project_name (str): Name of project where to look for queried entities. - asset_id (Union[str, ObjectId]): Id of asset entity. - task_name (str): Task name on asset. - fields (Optional[Iterable[str]]): Fields that should be returned. All - fields are returned if 'None' is passed. - - Returns: - Union[Dict, None]: Workfile entity data which can be reduced to - specified 'fields'.None is returned if workfile with specified - filters was not found. 
- """ - - if not asset_id or not task_name or not filename: - return None - - query_filter = { - "type": "workfile", - "parent": convert_id(asset_id), - "task_name": task_name, - "filename": filename - } - conn = get_project_connection(project_name) - return conn.find_one(query_filter, _prepare_fields(fields)) - - -""" -## Custom data storage: -- Settings - OP settings overrides and local settings -- Logging - logs from Logger -- Webpublisher - jobs -- Ftrack - events -- Maya - Shaders - - openpype/hosts/maya/api/shader_definition_editor.py - - openpype/hosts/maya/plugins/publish/validate_model_name.py - -## Global publish plugins -- openpype/plugins/publish/extract_hierarchy_avalon.py - Create: - - asset - Update: - - asset - -## Lib -- openpype/lib/avalon_context.py - Update: - - workfile data -- openpype/lib/project_backpack.py - Update: - - project -""" +if not AYON_SERVER_ENABLED: + from .mongo.entities import * +else: + from .server.entities import * diff --git a/openpype/client/entity_links.py b/openpype/client/entity_links.py index b74b4ce7f6..e18970de90 100644 --- a/openpype/client/entity_links.py +++ b/openpype/client/entity_links.py @@ -1,243 +1,6 @@ -from .mongo import get_project_connection -from .entities import ( - get_assets, - get_asset_by_id, - get_version_by_id, - get_representation_by_id, - convert_id, -) +from openpype import AYON_SERVER_ENABLED - -def get_linked_asset_ids(project_name, asset_doc=None, asset_id=None): - """Extract linked asset ids from asset document. - - One of asset document or asset id must be passed. - - Note: - Asset links now works only from asset to assets. - - Args: - asset_doc (dict): Asset document from DB. - - Returns: - List[Union[ObjectId, str]]: Asset ids of input links. - """ - - output = [] - if not asset_doc and not asset_id: - return output - - if not asset_doc: - asset_doc = get_asset_by_id( - project_name, asset_id, fields=["data.inputLinks"] - ) - - input_links = asset_doc["data"].get("inputLinks") - if not input_links: - return output - - for item in input_links: - # Backwards compatibility for "_id" key which was replaced with - # "id" - if "_id" in item: - link_id = item["_id"] - else: - link_id = item["id"] - output.append(link_id) - return output - - -def get_linked_assets( - project_name, asset_doc=None, asset_id=None, fields=None -): - """Return linked assets based on passed asset document. - - One of asset document or asset id must be passed. - - Args: - project_name (str): Name of project where to look for queried entities. - asset_doc (Dict[str, Any]): Asset document from database. - asset_id (Union[ObjectId, str]): Asset id. Can be used instead of - asset document. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. - - Returns: - List[Dict[str, Any]]: Asset documents of input links for passed - asset doc. - """ - - if not asset_doc: - if not asset_id: - return [] - asset_doc = get_asset_by_id( - project_name, - asset_id, - fields=["data.inputLinks"] - ) - if not asset_doc: - return [] - - link_ids = get_linked_asset_ids(project_name, asset_doc=asset_doc) - if not link_ids: - return [] - - return list(get_assets(project_name, asset_ids=link_ids, fields=fields)) - - -def get_linked_representation_id( - project_name, repre_doc=None, repre_id=None, link_type=None, max_depth=None -): - """Returns list of linked ids of particular type (if provided). - - One of representation document or representation id must be passed. 
- Note: - Representation links now works only from representation through version - back to representations. - - Args: - project_name (str): Name of project where look for links. - repre_doc (Dict[str, Any]): Representation document. - repre_id (Union[ObjectId, str]): Representation id. - link_type (str): Type of link (e.g. 'reference', ...). - max_depth (int): Limit recursion level. Default: 0 - - Returns: - List[ObjectId] Linked representation ids. - """ - - if repre_doc: - repre_id = repre_doc["_id"] - - if repre_id: - repre_id = convert_id(repre_id) - - if not repre_id and not repre_doc: - return [] - - version_id = None - if repre_doc: - version_id = repre_doc.get("parent") - - if not version_id: - repre_doc = get_representation_by_id( - project_name, repre_id, fields=["parent"] - ) - version_id = repre_doc["parent"] - - if not version_id: - return [] - - version_doc = get_version_by_id( - project_name, version_id, fields=["type", "version_id"] - ) - if version_doc["type"] == "hero_version": - version_id = version_doc["version_id"] - - if max_depth is None: - max_depth = 0 - - match = { - "_id": version_id, - # Links are not stored to hero versions at this moment so filter - # is limited to just versions - "type": "version" - } - - graph_lookup = { - "from": project_name, - "startWith": "$data.inputLinks.id", - "connectFromField": "data.inputLinks.id", - "connectToField": "_id", - "as": "outputs_recursive", - "depthField": "depth" - } - if max_depth != 0: - # We offset by -1 since 0 basically means no recursion - # but the recursion only happens after the initial lookup - # for outputs. - graph_lookup["maxDepth"] = max_depth - 1 - - query_pipeline = [ - # Match - {"$match": match}, - # Recursive graph lookup for inputs - {"$graphLookup": graph_lookup} - ] - conn = get_project_connection(project_name) - result = conn.aggregate(query_pipeline) - referenced_version_ids = _process_referenced_pipeline_result( - result, link_type - ) - if not referenced_version_ids: - return [] - - ref_ids = conn.distinct( - "_id", - filter={ - "parent": {"$in": list(referenced_version_ids)}, - "type": "representation" - } - ) - - return list(ref_ids) - - -def _process_referenced_pipeline_result(result, link_type): - """Filters result from pipeline for particular link_type. - - Pipeline cannot use link_type directly in a query. 
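One subtlety in the traversal above is the depth handling: `$graphLookup` counts `maxDepth` as additional recursion steps after the initial lookup, hence the `max_depth - 1` offset in the code. A hedged usage sketch; the project name, id, and link type are placeholders:

```python
# Follow 'reference' input links exactly one hop away from the version
# of the given representation.
from openpype.client.entity_links import get_linked_representation_id

linked_repre_ids = get_linked_representation_id(
    "demo_project",
    repre_id="563ea0b0d50d71b22f1854d2",
    link_type="reference",
    max_depth=1,
)
```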
- - Returns: - (list) - """ - - referenced_version_ids = set() - correctly_linked_ids = set() - for item in result: - input_links = item.get("data", {}).get("inputLinks") - if not input_links: - continue - - _filter_input_links( - input_links, - link_type, - correctly_linked_ids - ) - - # outputs_recursive in random order, sort by depth - outputs_recursive = item.get("outputs_recursive") - if not outputs_recursive: - continue - - for output in sorted(outputs_recursive, key=lambda o: o["depth"]): - output_links = output.get("data", {}).get("inputLinks") - if not output_links and output["type"] != "hero_version": - continue - - # Leaf - if output["_id"] not in correctly_linked_ids: - continue - - _filter_input_links( - output_links, - link_type, - correctly_linked_ids - ) - - referenced_version_ids.add(output["_id"]) - - return referenced_version_ids - - -def _filter_input_links(input_links, link_type, correctly_linked_ids): - if not input_links: # to handle hero versions - return - - for input_link in input_links: - if link_type and input_link["type"] != link_type: - continue - - link_id = input_link.get("id") or input_link.get("_id") - if link_id is not None: - correctly_linked_ids.add(link_id) +if not AYON_SERVER_ENABLED: + from .mongo.entity_links import * +else: + from .server.entity_links import * diff --git a/openpype/client/mongo/__init__.py b/openpype/client/mongo/__init__.py new file mode 100644 index 0000000000..5c5143a731 --- /dev/null +++ b/openpype/client/mongo/__init__.py @@ -0,0 +1,20 @@ +from .mongo import ( + MongoEnvNotSet, + get_default_components, + should_add_certificate_path_to_mongo_url, + validate_mongo_connection, + OpenPypeMongoConnection, + get_project_database, + get_project_connection, +) + + +__all__ = ( + "MongoEnvNotSet", + "get_default_components", + "should_add_certificate_path_to_mongo_url", + "validate_mongo_connection", + "OpenPypeMongoConnection", + "get_project_database", + "get_project_connection", +) diff --git a/openpype/client/mongo/entities.py b/openpype/client/mongo/entities.py new file mode 100644 index 0000000000..adbdd7a47c --- /dev/null +++ b/openpype/client/mongo/entities.py @@ -0,0 +1,1553 @@ +"""Unclear if these will have public functions like these. + +Goal is that most of functions here are called on (or with) an object +that has project name as a context (e.g. on 'ProjectEntity'?). + ++ We will need more specific functions doing very specific queries really fast. +""" + +import re +import collections + +import six +from bson.objectid import ObjectId + +from .mongo import get_project_database, get_project_connection + +PatternType = type(re.compile("")) + + +def _prepare_fields(fields, required_fields=None): + if not fields: + return None + + output = { + field: True + for field in fields + } + if "_id" not in output: + output["_id"] = True + + if required_fields: + for key in required_fields: + output[key] = True + return output + + +def convert_id(in_id): + """Helper function for conversion of id from string to ObjectId. + + Args: + in_id (Union[str, ObjectId, Any]): Entity id that should be converted + to right type for queries. + + Returns: + Union[ObjectId, Any]: Converted ids to ObjectId or in type. + """ + + if isinstance(in_id, six.string_types): + return ObjectId(in_id) + return in_id + + +def convert_ids(in_ids): + """Helper function for conversion of ids from string to ObjectId. + + Args: + in_ids (Iterable[Union[str, ObjectId, Any]]): List of entity ids that + should be converted to right type for queries. 
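The private helpers opening the new module are small but load-bearing; their behavior can be pinned down with a few assertions. Illustrative only, since these are not public API:

```python
# Expected behavior of the projection and id-conversion helpers above.
from bson.objectid import ObjectId
from openpype.client.mongo.entities import _prepare_fields, convert_id

# 'None'/empty means "no projection", so all fields are returned.
assert _prepare_fields(None) is None

# '_id' is always included, and required fields can be forced in.
projection = _prepare_fields(["name"], required_fields=["parent"])
assert projection == {"name": True, "_id": True, "parent": True}

# Strings are converted to ObjectId, anything else passes through.
assert isinstance(convert_id("563ea0b0d50d71b22f1854d0"), ObjectId)
```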
+ + Returns: + List[ObjectId]: Converted ids to ObjectId. + """ + + _output = set() + for in_id in in_ids: + if in_id is not None: + _output.add(convert_id(in_id)) + return list(_output) + + +def get_projects(active=True, inactive=False, fields=None): + """Yield all project entity documents. + + Args: + active (Optional[bool]): Include active projects. Defaults to True. + inactive (Optional[bool]): Include inactive projects. + Defaults to False. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Yields: + dict: Project entity data which can be reduced to specified 'fields'. + None is returned if project with specified filters was not found. + """ + mongodb = get_project_database() + for project_name in mongodb.collection_names(): + if project_name in ("system.indexes",): + continue + project_doc = get_project( + project_name, active=active, inactive=inactive, fields=fields + ) + if project_doc is not None: + yield project_doc + + +def get_project(project_name, active=True, inactive=True, fields=None): + """Return project entity document by project name. + + Args: + project_name (str): Name of project. + active (Optional[bool]): Allow active project. Defaults to True. + inactive (Optional[bool]): Allow inactive project. Defaults to True. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Project entity data which can be reduced to + specified 'fields'. None is returned if project with specified + filters was not found. + """ + # Skip if both are disabled + if not active and not inactive: + return None + + query_filter = {"type": "project"} + # Keep query untouched if both should be available + if active and inactive: + pass + + # Add filter to keep only active + elif active: + query_filter["$or"] = [ + {"data.active": {"$exists": False}}, + {"data.active": True}, + ] + + # Add filter to keep only inactive + elif inactive: + query_filter["$or"] = [ + {"data.active": {"$exists": False}}, + {"data.active": False}, + ] + + conn = get_project_connection(project_name) + return conn.find_one(query_filter, _prepare_fields(fields)) + + +def get_whole_project(project_name): + """Receive all documents from project. + + Helper that can be used to get all document from whole project. For example + for backups etc. + + Returns: + Cursor: Query cursor as iterable which returns all documents from + project collection. + """ + + conn = get_project_connection(project_name) + return conn.find({}) + + +def get_asset_by_id(project_name, asset_id, fields=None): + """Receive asset data by its id. + + Args: + project_name (str): Name of project where to look for queried entities. + asset_id (Union[str, ObjectId]): Asset's id. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Asset entity data which can be reduced to + specified 'fields'. None is returned if asset with specified + filters was not found. + """ + + asset_id = convert_id(asset_id) + if not asset_id: + return None + + query_filter = {"type": "asset", "_id": asset_id} + conn = get_project_connection(project_name) + return conn.find_one(query_filter, _prepare_fields(fields)) + + +def get_asset_by_name(project_name, asset_name, fields=None): + """Receive asset data by its name. + + Args: + project_name (str): Name of project where to look for queried entities. + asset_name (str): Asset's name. 
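A small sketch of project listing with the generator above; reducing `fields` keeps the per-collection queries cheap (the flag combination follows the documented defaults):

```python
from openpype.client.mongo.entities import get_projects

# Active projects only (the default), with a reduced projection.
for project_doc in get_projects(fields=["name", "data.active"]):
    print(project_doc["name"])

# Include inactive projects explicitly.
all_projects = list(get_projects(active=True, inactive=True))
```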
+ fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Asset entity data which can be reduced to + specified 'fields'. None is returned if asset with specified + filters was not found. + """ + + if not asset_name: + return None + + query_filter = {"type": "asset", "name": asset_name} + conn = get_project_connection(project_name) + return conn.find_one(query_filter, _prepare_fields(fields)) + + +# NOTE this could be just public function? +# - any better variable name instead of 'standard'? +# - same approach can be used for rest of types +def _get_assets( + project_name, + asset_ids=None, + asset_names=None, + parent_ids=None, + standard=True, + archived=False, + fields=None +): + """Assets for specified project by passed filters. + + Passed filters (ids and names) are always combined so all conditions must + match. + + To receive all assets from project just keep filters empty. + + Args: + project_name (str): Name of project where to look for queried entities. + asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should + be found. + asset_names (Iterable[str]): Name assets that should be found. + parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids. + standard (bool): Query standard assets (type 'asset'). + archived (bool): Query archived assets (type 'archived_asset'). + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Cursor: Query cursor as iterable which returns asset documents matching + passed filters. + """ + + asset_types = [] + if standard: + asset_types.append("asset") + if archived: + asset_types.append("archived_asset") + + if not asset_types: + return [] + + if len(asset_types) == 1: + query_filter = {"type": asset_types[0]} + else: + query_filter = {"type": {"$in": asset_types}} + + if asset_ids is not None: + asset_ids = convert_ids(asset_ids) + if not asset_ids: + return [] + query_filter["_id"] = {"$in": asset_ids} + + if asset_names is not None: + if not asset_names: + return [] + query_filter["name"] = {"$in": list(asset_names)} + + if parent_ids is not None: + parent_ids = convert_ids(parent_ids) + if not parent_ids: + return [] + query_filter["data.visualParent"] = {"$in": parent_ids} + + conn = get_project_connection(project_name) + + return conn.find(query_filter, _prepare_fields(fields)) + + +def get_assets( + project_name, + asset_ids=None, + asset_names=None, + parent_ids=None, + archived=False, + fields=None +): + """Assets for specified project by passed filters. + + Passed filters (ids and names) are always combined so all conditions must + match. + + To receive all assets from project just keep filters empty. + + Args: + project_name (str): Name of project where to look for queried entities. + asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should + be found. + asset_names (Iterable[str]): Name assets that should be found. + parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids. + archived (bool): Add also archived assets. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Cursor: Query cursor as iterable which returns asset documents matching + passed filters. 
+ """ + + return _get_assets( + project_name, + asset_ids, + asset_names, + parent_ids, + True, + archived, + fields + ) + + +def get_archived_assets( + project_name, + asset_ids=None, + asset_names=None, + parent_ids=None, + fields=None +): + """Archived assets for specified project by passed filters. + + Passed filters (ids and names) are always combined so all conditions must + match. + + To receive all archived assets from project just keep filters empty. + + Args: + project_name (str): Name of project where to look for queried entities. + asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should + be found. + asset_names (Iterable[str]): Name assets that should be found. + parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Cursor: Query cursor as iterable which returns asset documents matching + passed filters. + """ + + return _get_assets( + project_name, asset_ids, asset_names, parent_ids, False, True, fields + ) + + +def get_asset_ids_with_subsets(project_name, asset_ids=None): + """Find out which assets have existing subsets. + + Args: + project_name (str): Name of project where to look for queried entities. + asset_ids (Iterable[Union[str, ObjectId]]): Look only for entered + asset ids. + + Returns: + Iterable[ObjectId]: Asset ids that have existing subsets. + """ + + subset_query = { + "type": "subset" + } + if asset_ids is not None: + asset_ids = convert_ids(asset_ids) + if not asset_ids: + return [] + subset_query["parent"] = {"$in": asset_ids} + + conn = get_project_connection(project_name) + result = conn.aggregate([ + { + "$match": subset_query + }, + { + "$group": { + "_id": "$parent", + "count": {"$sum": 1} + } + } + ]) + asset_ids_with_subsets = [] + for item in result: + asset_id = item["_id"] + count = item["count"] + if count > 0: + asset_ids_with_subsets.append(asset_id) + return asset_ids_with_subsets + + +def get_subset_by_id(project_name, subset_id, fields=None): + """Single subset entity data by its id. + + Args: + project_name (str): Name of project where to look for queried entities. + subset_id (Union[str, ObjectId]): Id of subset which should be found. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Subset entity data which can be reduced to + specified 'fields'. None is returned if subset with specified + filters was not found. + """ + + subset_id = convert_id(subset_id) + if not subset_id: + return None + + query_filters = {"type": "subset", "_id": subset_id} + conn = get_project_connection(project_name) + return conn.find_one(query_filters, _prepare_fields(fields)) + + +def get_subset_by_name(project_name, subset_name, asset_id, fields=None): + """Single subset entity data by its name and its version id. + + Args: + project_name (str): Name of project where to look for queried entities. + subset_name (str): Name of subset. + asset_id (Union[str, ObjectId]): Id of parent asset. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Subset entity data which can be reduced to + specified 'fields'. None is returned if subset with specified + filters was not found. 
+ """ + if not subset_name: + return None + + asset_id = convert_id(asset_id) + if not asset_id: + return None + + query_filters = { + "type": "subset", + "name": subset_name, + "parent": asset_id + } + conn = get_project_connection(project_name) + return conn.find_one(query_filters, _prepare_fields(fields)) + + +def get_subsets( + project_name, + subset_ids=None, + subset_names=None, + asset_ids=None, + names_by_asset_ids=None, + archived=False, + fields=None +): + """Subset entities data from one project filtered by entered filters. + + Filters are additive (all conditions must pass to return subset). + + Args: + project_name (str): Name of project where to look for queried entities. + subset_ids (Iterable[Union[str, ObjectId]]): Subset ids that should be + queried. Filter ignored if 'None' is passed. + subset_names (Iterable[str]): Subset names that should be queried. + Filter ignored if 'None' is passed. + asset_ids (Iterable[Union[str, ObjectId]]): Asset ids under which + should look for the subsets. Filter ignored if 'None' is passed. + names_by_asset_ids (dict[ObjectId, List[str]]): Complex filtering + using asset ids and list of subset names under the asset. + archived (bool): Look for archived subsets too. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Cursor: Iterable cursor yielding all matching subsets. + """ + + subset_types = ["subset"] + if archived: + subset_types.append("archived_subset") + + if len(subset_types) == 1: + query_filter = {"type": subset_types[0]} + else: + query_filter = {"type": {"$in": subset_types}} + + if asset_ids is not None: + asset_ids = convert_ids(asset_ids) + if not asset_ids: + return [] + query_filter["parent"] = {"$in": asset_ids} + + if subset_ids is not None: + subset_ids = convert_ids(subset_ids) + if not subset_ids: + return [] + query_filter["_id"] = {"$in": subset_ids} + + if subset_names is not None: + if not subset_names: + return [] + query_filter["name"] = {"$in": list(subset_names)} + + if names_by_asset_ids is not None: + or_query = [] + for asset_id, names in names_by_asset_ids.items(): + if asset_id and names: + or_query.append({ + "parent": convert_id(asset_id), + "name": {"$in": list(names)} + }) + if not or_query: + return [] + query_filter["$or"] = or_query + + conn = get_project_connection(project_name) + return conn.find(query_filter, _prepare_fields(fields)) + + +def get_subset_families(project_name, subset_ids=None): + """Set of main families of subsets. + + Args: + project_name (str): Name of project where to look for queried entities. + subset_ids (Iterable[Union[str, ObjectId]]): Subset ids that should + be queried. All subsets from project are used if 'None' is passed. + + Returns: + set[str]: Main families of matching subsets. + """ + + subset_filter = { + "type": "subset" + } + if subset_ids is not None: + if not subset_ids: + return set() + subset_filter["_id"] = {"$in": list(subset_ids)} + + conn = get_project_connection(project_name) + result = list(conn.aggregate([ + {"$match": subset_filter}, + {"$project": { + "family": {"$arrayElemAt": ["$data.families", 0]} + }}, + {"$group": { + "_id": "family_group", + "families": {"$addToSet": "$family"} + }} + ])) + if result: + return set(result[0]["families"]) + return set() + + +def get_version_by_id(project_name, version_id, fields=None): + """Single version entity data by its id. + + Args: + project_name (str): Name of project where to look for queried entities. 
+ version_id (Union[str, ObjectId]): Id of version which should be found. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Version entity data which can be reduced to + specified 'fields'. None is returned if version with specified + filters was not found. + """ + + version_id = convert_id(version_id) + if not version_id: + return None + + query_filter = { + "type": {"$in": ["version", "hero_version"]}, + "_id": version_id + } + conn = get_project_connection(project_name) + return conn.find_one(query_filter, _prepare_fields(fields)) + + +def get_version_by_name(project_name, version, subset_id, fields=None): + """Single version entity data by its name and subset id. + + Args: + project_name (str): Name of project where to look for queried entities. + version (int): name of version entity (its version). + subset_id (Union[str, ObjectId]): Id of version which should be found. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Version entity data which can be reduced to + specified 'fields'. None is returned if version with specified + filters was not found. + """ + + subset_id = convert_id(subset_id) + if not subset_id: + return None + + conn = get_project_connection(project_name) + query_filter = { + "type": "version", + "parent": subset_id, + "name": version + } + return conn.find_one(query_filter, _prepare_fields(fields)) + + +def version_is_latest(project_name, version_id): + """Is version the latest from its subset. + + Note: + Hero versions are considered as latest. + + Todo: + Maybe raise exception when version was not found? + + Args: + project_name (str):Name of project where to look for queried entities. + version_id (Union[str, ObjectId]): Version id which is checked. + + Returns: + bool: True if is latest version from subset else False. + """ + + version_id = convert_id(version_id) + if not version_id: + return False + version_doc = get_version_by_id( + project_name, version_id, fields=["_id", "type", "parent"] + ) + # What to do when version is not found? 
+ if not version_doc: + return False + + if version_doc["type"] == "hero_version": + return True + + last_version = get_last_version_by_subset_id( + project_name, version_doc["parent"], fields=["_id"] + ) + return last_version["_id"] == version_id + + +def _get_versions( + project_name, + subset_ids=None, + version_ids=None, + versions=None, + standard=True, + hero=False, + fields=None +): + version_types = [] + if standard: + version_types.append("version") + + if hero: + version_types.append("hero_version") + + if not version_types: + return [] + elif len(version_types) == 1: + query_filter = {"type": version_types[0]} + else: + query_filter = {"type": {"$in": version_types}} + + if subset_ids is not None: + subset_ids = convert_ids(subset_ids) + if not subset_ids: + return [] + query_filter["parent"] = {"$in": subset_ids} + + if version_ids is not None: + version_ids = convert_ids(version_ids) + if not version_ids: + return [] + query_filter["_id"] = {"$in": version_ids} + + if versions is not None: + versions = list(versions) + if not versions: + return [] + + if len(versions) == 1: + query_filter["name"] = versions[0] + else: + query_filter["name"] = {"$in": versions} + + conn = get_project_connection(project_name) + + return conn.find(query_filter, _prepare_fields(fields)) + + +def get_versions( + project_name, + version_ids=None, + subset_ids=None, + versions=None, + hero=False, + fields=None +): + """Version entities data from one project filtered by entered filters. + + Filters are additive (all conditions must pass to return subset). + + Args: + project_name (str): Name of project where to look for queried entities. + version_ids (Iterable[Union[str, ObjectId]]): Version ids that will + be queried. Filter ignored if 'None' is passed. + subset_ids (Iterable[str]): Subset ids that will be queried. + Filter ignored if 'None' is passed. + versions (Iterable[int]): Version names (as integers). + Filter ignored if 'None' is passed. + hero (bool): Look also for hero versions. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Cursor: Iterable cursor yielding all matching versions. + """ + + return _get_versions( + project_name, + subset_ids, + version_ids, + versions, + standard=True, + hero=hero, + fields=fields + ) + + +def get_hero_version_by_subset_id(project_name, subset_id, fields=None): + """Hero version by subset id. + + Args: + project_name (str): Name of project where to look for queried entities. + subset_id (Union[str, ObjectId]): Subset id under which + is hero version. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Hero version entity data which can be reduced to + specified 'fields'. None is returned if hero version with specified + filters was not found. + """ + + subset_id = convert_id(subset_id) + if not subset_id: + return None + + versions = list(_get_versions( + project_name, + subset_ids=[subset_id], + standard=False, + hero=True, + fields=fields + )) + if versions: + return versions[0] + return None + + +def get_hero_version_by_id(project_name, version_id, fields=None): + """Hero version by its id. + + Args: + project_name (str): Name of project where to look for queried entities. + version_id (Union[str, ObjectId]): Hero version id. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. 
+
+    Returns:
+        Union[Dict, None]: Hero version entity data which can be reduced to
+            specified 'fields'. None is returned if hero version with specified
+            filters was not found.
+    """
+
+    version_id = convert_id(version_id)
+    if not version_id:
+        return None
+
+    versions = list(_get_versions(
+        project_name,
+        version_ids=[version_id],
+        standard=False,
+        hero=True,
+        fields=fields
+    ))
+    if versions:
+        return versions[0]
+    return None
+
+
+def get_hero_versions(
+    project_name,
+    subset_ids=None,
+    version_ids=None,
+    fields=None
+):
+    """Hero version entities data from one project filtered by entered filters.
+
+    Args:
+        project_name (str): Name of project where to look for queried entities.
+        subset_ids (Iterable[Union[str, ObjectId]]): Subset ids under which
+            to look for hero versions. Filter ignored if 'None' is passed.
+        version_ids (Iterable[Union[str, ObjectId]]): Hero version ids. Filter
+            ignored if 'None' is passed.
+        fields (Optional[Iterable[str]]): Fields that should be returned. All
+            fields are returned if 'None' is passed.
+
+    Returns:
+        Union[Cursor, list]: Iterable yielding hero versions matching passed
+            filters.
+    """
+
+    return _get_versions(
+        project_name,
+        subset_ids,
+        version_ids,
+        standard=False,
+        hero=True,
+        fields=fields
+    )
+
+
+def get_output_link_versions(project_name, version_id, fields=None):
+    """Versions where passed version was used as input.
+
+    Question:
+        Not 100% sure about the usage of the function, so the name and
+        docstring may not match what it actually does?
+
+    Args:
+        project_name (str): Name of project where to look for queried entities.
+        version_id (Union[str, ObjectId]): Version id which can be used
+            as input link for other versions.
+        fields (Optional[Iterable[str]]): Fields that should be returned. All
+            fields are returned if 'None' is passed.
+
+    Returns:
+        Iterable: Iterable cursor yielding versions that are used as input
+            links for passed version.
+    """
+
+    version_id = convert_id(version_id)
+    if not version_id:
+        return []
+
+    conn = get_project_connection(project_name)
+    # Does it make sense to look for hero versions too?
+    query_filter = {
+        "type": "version",
+        "data.inputLinks.id": version_id
+    }
+    return conn.find(query_filter, _prepare_fields(fields))
+
+
+def get_last_versions(project_name, subset_ids, active=None, fields=None):
+    """Latest versions for entered subset_ids.
+
+    Args:
+        project_name (str): Name of project where to look for queried entities.
+        subset_ids (Iterable[Union[str, ObjectId]]): List of subset ids.
+        active (Optional[bool]): If True, only active versions (and versions
+            without the active flag) are returned; if False, only inactive
+            ones. Both are queried if 'None' is passed.
+        fields (Optional[Iterable[str]]): Fields that should be returned. All
+            fields are returned if 'None' is passed.
+
+    Returns:
+        dict[ObjectId, Dict[str, Any]]: Last version document by subset id.
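+
+    Example:
+        A minimal usage sketch; the project name and ids are hypothetical::
+
+            last_versions = get_last_versions(
+                "demo_project", subset_ids=subset_ids, fields=["name"]
+            )
+            for subset_id, version_doc in last_versions.items():
+                print(subset_id, version_doc["name"])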
+ """ + + subset_ids = convert_ids(subset_ids) + if not subset_ids: + return {} + + if fields is not None: + fields = list(fields) + if not fields: + return {} + + # Avoid double query if only name and _id are requested + name_needed = False + limit_query = False + if fields: + fields_s = set(fields) + if "name" in fields_s: + name_needed = True + fields_s.remove("name") + + for field in ("_id", "parent"): + if field in fields_s: + fields_s.remove(field) + limit_query = len(fields_s) == 0 + + group_item = { + "_id": "$parent", + "_version_id": {"$last": "$_id"} + } + # Add name if name is needed (only for limit query) + if name_needed: + group_item["name"] = {"$last": "$name"} + + aggregate_filter = { + "type": "version", + "parent": {"$in": subset_ids} + } + if active is False: + aggregate_filter["data.active"] = active + elif active is True: + aggregate_filter["$or"] = [ + {"data.active": {"$exists": 0}}, + {"data.active": active}, + ] + + aggregation_pipeline = [ + # Find all versions of those subsets + {"$match": aggregate_filter}, + # Sorting versions all together + {"$sort": {"name": 1}}, + # Group them by "parent", but only take the last + {"$group": group_item} + ] + + conn = get_project_connection(project_name) + aggregate_result = conn.aggregate(aggregation_pipeline) + if limit_query: + output = {} + for item in aggregate_result: + subset_id = item["_id"] + item_data = {"_id": item["_version_id"], "parent": subset_id} + if name_needed: + item_data["name"] = item["name"] + output[subset_id] = item_data + return output + + version_ids = [ + doc["_version_id"] + for doc in aggregate_result + ] + + fields = _prepare_fields(fields, ["parent"]) + + version_docs = get_versions( + project_name, version_ids=version_ids, fields=fields + ) + + return { + version_doc["parent"]: version_doc + for version_doc in version_docs + } + + +def get_last_version_by_subset_id(project_name, subset_id, fields=None): + """Last version for passed subset id. + + Args: + project_name (str): Name of project where to look for queried entities. + subset_id (Union[str, ObjectId]): Id of version which should be found. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Version entity data which can be reduced to + specified 'fields'. None is returned if version with specified + filters was not found. + """ + + subset_id = convert_id(subset_id) + if not subset_id: + return None + + last_versions = get_last_versions( + project_name, subset_ids=[subset_id], fields=fields + ) + return last_versions.get(subset_id) + + +def get_last_version_by_subset_name( + project_name, subset_name, asset_id=None, asset_name=None, fields=None +): + """Last version for passed subset name under asset id/name. + + It is required to pass 'asset_id' or 'asset_name'. Asset id is recommended + if is available. + + Args: + project_name (str): Name of project where to look for queried entities. + subset_name (str): Name of subset. + asset_id (Union[str, ObjectId]): Asset id which is parent of passed + subset name. + asset_name (str): Asset name which is parent of passed subset name. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Version entity data which can be reduced to + specified 'fields'. None is returned if version with specified + filters was not found. 
+ """ + + if not asset_id and not asset_name: + return None + + if not asset_id: + asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"]) + if not asset_doc: + return None + asset_id = asset_doc["_id"] + subset_doc = get_subset_by_name( + project_name, subset_name, asset_id, fields=["_id"] + ) + if not subset_doc: + return None + return get_last_version_by_subset_id( + project_name, subset_doc["_id"], fields=fields + ) + + +def get_representation_by_id(project_name, representation_id, fields=None): + """Representation entity data by its id. + + Args: + project_name (str): Name of project where to look for queried entities. + representation_id (Union[str, ObjectId]): Representation id. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Representation entity data which can be reduced to + specified 'fields'. None is returned if representation with + specified filters was not found. + """ + + if not representation_id: + return None + + repre_types = ["representation", "archived_representation"] + query_filter = { + "type": {"$in": repre_types} + } + if representation_id is not None: + query_filter["_id"] = convert_id(representation_id) + + conn = get_project_connection(project_name) + + return conn.find_one(query_filter, _prepare_fields(fields)) + + +def get_representation_by_name( + project_name, representation_name, version_id, fields=None +): + """Representation entity data by its name and its version id. + + Args: + project_name (str): Name of project where to look for queried entities. + representation_name (str): Representation name. + version_id (Union[str, ObjectId]): Id of parent version entity. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[dict[str, Any], None]: Representation entity data which can be + reduced to specified 'fields'. None is returned if representation + with specified filters was not found. 
+ """ + + version_id = convert_id(version_id) + if not version_id or not representation_name: + return None + repre_types = ["representation", "archived_representations"] + query_filter = { + "type": {"$in": repre_types}, + "name": representation_name, + "parent": version_id + } + + conn = get_project_connection(project_name) + return conn.find_one(query_filter, _prepare_fields(fields)) + + +def _flatten_dict(data): + flatten_queue = collections.deque() + flatten_queue.append(data) + output = {} + while flatten_queue: + item = flatten_queue.popleft() + for key, value in item.items(): + if not isinstance(value, dict): + output[key] = value + continue + + tmp = {} + for subkey, subvalue in value.items(): + new_key = "{}.{}".format(key, subkey) + tmp[new_key] = subvalue + flatten_queue.append(tmp) + return output + + +def _regex_filters(filters): + output = [] + for key, value in filters.items(): + regexes = [] + a_values = [] + if isinstance(value, PatternType): + regexes.append(value) + elif isinstance(value, (list, tuple, set)): + for item in value: + if isinstance(item, PatternType): + regexes.append(item) + else: + a_values.append(item) + else: + a_values.append(value) + + key_filters = [] + if len(a_values) == 1: + key_filters.append({key: a_values[0]}) + elif a_values: + key_filters.append({key: {"$in": a_values}}) + + for regex in regexes: + key_filters.append({key: {"$regex": regex}}) + + if len(key_filters) == 1: + output.append(key_filters[0]) + else: + output.append({"$or": key_filters}) + + return output + + +def _get_representations( + project_name, + representation_ids, + representation_names, + version_ids, + context_filters, + names_by_version_ids, + standard, + archived, + fields +): + default_output = [] + repre_types = [] + if standard: + repre_types.append("representation") + if archived: + repre_types.append("archived_representation") + + if not repre_types: + return default_output + + if len(repre_types) == 1: + query_filter = {"type": repre_types[0]} + else: + query_filter = {"type": {"$in": repre_types}} + + if representation_ids is not None: + representation_ids = convert_ids(representation_ids) + if not representation_ids: + return default_output + query_filter["_id"] = {"$in": representation_ids} + + if representation_names is not None: + if not representation_names: + return default_output + query_filter["name"] = {"$in": list(representation_names)} + + if version_ids is not None: + version_ids = convert_ids(version_ids) + if not version_ids: + return default_output + query_filter["parent"] = {"$in": version_ids} + + or_queries = [] + if names_by_version_ids is not None: + or_query = [] + for version_id, names in names_by_version_ids.items(): + if version_id and names: + or_query.append({ + "parent": convert_id(version_id), + "name": {"$in": list(names)} + }) + if not or_query: + return default_output + or_queries.append(or_query) + + if context_filters is not None: + if not context_filters: + return [] + _flatten_filters = _flatten_dict(context_filters) + flatten_filters = {} + for key, value in _flatten_filters.items(): + if not key.startswith("context"): + key = "context.{}".format(key) + flatten_filters[key] = value + + for item in _regex_filters(flatten_filters): + for key, value in item.items(): + if key != "$or": + query_filter[key] = value + + elif value: + or_queries.append(value) + + if len(or_queries) == 1: + query_filter["$or"] = or_queries[0] + elif or_queries: + and_query = [] + for or_query in or_queries: + if isinstance(or_query, list): + or_query 
= {"$or": or_query} + and_query.append(or_query) + query_filter["$and"] = and_query + + conn = get_project_connection(project_name) + + return conn.find(query_filter, _prepare_fields(fields)) + + +def get_representations( + project_name, + representation_ids=None, + representation_names=None, + version_ids=None, + context_filters=None, + names_by_version_ids=None, + archived=False, + standard=True, + fields=None +): + """Representation entities data from one project filtered by filters. + + Filters are additive (all conditions must pass to return subset). + + Args: + project_name (str): Name of project where to look for queried entities. + representation_ids (Iterable[Union[str, ObjectId]]): Representation ids + used as filter. Filter ignored if 'None' is passed. + representation_names (Iterable[str]): Representations names used + as filter. Filter ignored if 'None' is passed. + version_ids (Iterable[str]): Subset ids used as parent filter. Filter + ignored if 'None' is passed. + context_filters (Dict[str, List[str, PatternType]]): Filter by + representation context fields. + names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering + using version ids and list of names under the version. + archived (bool): Output will also contain archived representations. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Cursor: Iterable cursor yielding all matching representations. + """ + + return _get_representations( + project_name=project_name, + representation_ids=representation_ids, + representation_names=representation_names, + version_ids=version_ids, + context_filters=context_filters, + names_by_version_ids=names_by_version_ids, + standard=standard, + archived=archived, + fields=fields + ) + + +def get_archived_representations( + project_name, + representation_ids=None, + representation_names=None, + version_ids=None, + context_filters=None, + names_by_version_ids=None, + fields=None +): + """Archived representation entities data from project with applied filters. + + Filters are additive (all conditions must pass to return subset). + + Args: + project_name (str): Name of project where to look for queried entities. + representation_ids (Iterable[Union[str, ObjectId]]): Representation ids + used as filter. Filter ignored if 'None' is passed. + representation_names (Iterable[str]): Representations names used + as filter. Filter ignored if 'None' is passed. + version_ids (Iterable[str]): Subset ids used as parent filter. Filter + ignored if 'None' is passed. + context_filters (Dict[str, List[str, PatternType]]): Filter by + representation context fields. + names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering + using version ids and list of names under the version. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Cursor: Iterable cursor yielding all matching representations. + """ + + return _get_representations( + project_name=project_name, + representation_ids=representation_ids, + representation_names=representation_names, + version_ids=version_ids, + context_filters=context_filters, + names_by_version_ids=names_by_version_ids, + standard=False, + archived=True, + fields=fields + ) + + +def get_representations_parents(project_name, representations): + """Prepare parents of representation entities. + + Each item of returned dictionary contains version, subset, asset + and project in that order. 
+
+    Args:
+        project_name (str): Name of project where to look for queried entities.
+        representations (List[dict]): Representation entities with at least
+            '_id' and 'parent' keys.
+
+    Returns:
+        dict[ObjectId, tuple]: Parents by representation id.
+    """
+
+    repre_docs_by_version_id = collections.defaultdict(list)
+    version_docs_by_version_id = {}
+    version_docs_by_subset_id = collections.defaultdict(list)
+    subset_docs_by_subset_id = {}
+    subset_docs_by_asset_id = collections.defaultdict(list)
+    output = {}
+    for repre_doc in representations:
+        repre_id = repre_doc["_id"]
+        version_id = repre_doc["parent"]
+        output[repre_id] = (None, None, None, None)
+        repre_docs_by_version_id[version_id].append(repre_doc)
+
+    version_docs = get_versions(
+        project_name,
+        version_ids=repre_docs_by_version_id.keys(),
+        hero=True
+    )
+    for version_doc in version_docs:
+        version_id = version_doc["_id"]
+        subset_id = version_doc["parent"]
+        version_docs_by_version_id[version_id] = version_doc
+        version_docs_by_subset_id[subset_id].append(version_doc)
+
+    subset_docs = get_subsets(
+        project_name, subset_ids=version_docs_by_subset_id.keys()
+    )
+    for subset_doc in subset_docs:
+        subset_id = subset_doc["_id"]
+        asset_id = subset_doc["parent"]
+        subset_docs_by_subset_id[subset_id] = subset_doc
+        subset_docs_by_asset_id[asset_id].append(subset_doc)
+
+    asset_docs = get_assets(
+        project_name, asset_ids=subset_docs_by_asset_id.keys()
+    )
+    asset_docs_by_id = {
+        asset_doc["_id"]: asset_doc
+        for asset_doc in asset_docs
+    }
+
+    project_doc = get_project(project_name)
+
+    for version_id, repre_docs in repre_docs_by_version_id.items():
+        asset_doc = None
+        subset_doc = None
+        version_doc = version_docs_by_version_id.get(version_id)
+        if version_doc:
+            subset_id = version_doc["parent"]
+            subset_doc = subset_docs_by_subset_id.get(subset_id)
+            if subset_doc:
+                asset_id = subset_doc["parent"]
+                asset_doc = asset_docs_by_id.get(asset_id)
+
+        for repre_doc in repre_docs:
+            repre_id = repre_doc["_id"]
+            output[repre_id] = (
+                version_doc, subset_doc, asset_doc, project_doc
+            )
+    return output
+
+
+def get_representation_parents(project_name, representation):
+    """Prepare parents of representation entity.
+
+    Returned tuple contains version, subset, asset and project in that order.
+
+    Args:
+        project_name (str): Name of project where to look for queried entities.
+        representation (dict): Representation entity with at least
+            '_id' and 'parent' keys.
+
+    Returns:
+        Union[tuple, None]: Tuple of version, subset, asset and project
+            documents. None is returned if no representation is passed.
+    """
+
+    if not representation:
+        return None
+
+    repre_id = representation["_id"]
+    parents_by_repre_id = get_representations_parents(
+        project_name, [representation]
+    )
+    return parents_by_repre_id[repre_id]
+
+
+def get_thumbnail_id_from_source(project_name, src_type, src_id):
+    """Receive thumbnail id from source entity.
+
+    Args:
+        project_name (str): Name of project where to look for queried entities.
+        src_type (str): Type of source entity ('asset', 'version').
+        src_id (Union[str, ObjectId]): Id of source entity.
+
+    Returns:
+        Union[ObjectId, None]: Thumbnail id assigned to entity. None if source
+            entity does not have any thumbnail id assigned.
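+
+    Example:
+        A minimal usage sketch; the project name and asset id are
+        hypothetical::
+
+            thumbnail_id = get_thumbnail_id_from_source(
+                "demo_project", "asset", asset_id
+            )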
+ """ + + if not src_type or not src_id: + return None + + query_filter = {"_id": convert_id(src_id)} + + conn = get_project_connection(project_name) + src_doc = conn.find_one(query_filter, {"data.thumbnail_id"}) + if src_doc: + return src_doc.get("data", {}).get("thumbnail_id") + return None + + +def get_thumbnails(project_name, thumbnail_ids, fields=None): + """Receive thumbnails entity data. + + Thumbnail entity can be used to receive binary content of thumbnail based + on its content and ThumbnailResolvers. + + Args: + project_name (str): Name of project where to look for queried entities. + thumbnail_ids (Iterable[Union[str, ObjectId]]): Ids of thumbnail + entities. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + cursor: Cursor of queried documents. + """ + + if thumbnail_ids: + thumbnail_ids = convert_ids(thumbnail_ids) + + if not thumbnail_ids: + return [] + query_filter = { + "type": "thumbnail", + "_id": {"$in": thumbnail_ids} + } + conn = get_project_connection(project_name) + return conn.find(query_filter, _prepare_fields(fields)) + + +def get_thumbnail(project_name, thumbnail_id, fields=None): + """Receive thumbnail entity data. + + Args: + project_name (str): Name of project where to look for queried entities. + thumbnail_id (Union[str, ObjectId]): Id of thumbnail entity. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Thumbnail entity data which can be reduced to + specified 'fields'.None is returned if thumbnail with specified + filters was not found. + """ + + if not thumbnail_id: + return None + query_filter = {"type": "thumbnail", "_id": convert_id(thumbnail_id)} + conn = get_project_connection(project_name) + return conn.find_one(query_filter, _prepare_fields(fields)) + + +def get_workfile_info( + project_name, asset_id, task_name, filename, fields=None +): + """Document with workfile information. + + Warning: + Query is based on filename and context which does not meant it will + find always right and expected result. Information have limited usage + and is not recommended to use it as source information about workfile. + + Args: + project_name (str): Name of project where to look for queried entities. + asset_id (Union[str, ObjectId]): Id of asset entity. + task_name (str): Task name on asset. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Workfile entity data which can be reduced to + specified 'fields'.None is returned if workfile with specified + filters was not found. 
+ """ + + if not asset_id or not task_name or not filename: + return None + + query_filter = { + "type": "workfile", + "parent": convert_id(asset_id), + "task_name": task_name, + "filename": filename + } + conn = get_project_connection(project_name) + return conn.find_one(query_filter, _prepare_fields(fields)) + + +""" +## Custom data storage: +- Settings - OP settings overrides and local settings +- Logging - logs from Logger +- Webpublisher - jobs +- Ftrack - events +- Maya - Shaders + - openpype/hosts/maya/api/shader_definition_editor.py + - openpype/hosts/maya/plugins/publish/validate_model_name.py + +## Global publish plugins +- openpype/plugins/publish/extract_hierarchy_avalon.py + Create: + - asset + Update: + - asset + +## Lib +- openpype/lib/avalon_context.py + Update: + - workfile data +- openpype/lib/project_backpack.py + Update: + - project +""" diff --git a/openpype/client/mongo/entity_links.py b/openpype/client/mongo/entity_links.py new file mode 100644 index 0000000000..c97a828118 --- /dev/null +++ b/openpype/client/mongo/entity_links.py @@ -0,0 +1,244 @@ +from .mongo import get_project_connection +from .entities import ( + get_assets, + get_asset_by_id, + get_version_by_id, + get_representation_by_id, + convert_id, +) + + +def get_linked_asset_ids(project_name, asset_doc=None, asset_id=None): + """Extract linked asset ids from asset document. + + One of asset document or asset id must be passed. + + Note: + Asset links now works only from asset to assets. + + Args: + asset_doc (dict): Asset document from DB. + + Returns: + List[Union[ObjectId, str]]: Asset ids of input links. + """ + + output = [] + if not asset_doc and not asset_id: + return output + + if not asset_doc: + asset_doc = get_asset_by_id( + project_name, asset_id, fields=["data.inputLinks"] + ) + + input_links = asset_doc["data"].get("inputLinks") + if not input_links: + return output + + for item in input_links: + # Backwards compatibility for "_id" key which was replaced with + # "id" + if "_id" in item: + link_id = item["_id"] + else: + link_id = item["id"] + output.append(link_id) + return output + + +def get_linked_assets( + project_name, asset_doc=None, asset_id=None, fields=None +): + """Return linked assets based on passed asset document. + + One of asset document or asset id must be passed. + + Args: + project_name (str): Name of project where to look for queried entities. + asset_doc (Dict[str, Any]): Asset document from database. + asset_id (Union[ObjectId, str]): Asset id. Can be used instead of + asset document. + fields (Iterable[str]): Fields that should be returned. All fields are + returned if 'None' is passed. + + Returns: + List[Dict[str, Any]]: Asset documents of input links for passed + asset doc. + """ + + if not asset_doc: + if not asset_id: + return [] + asset_doc = get_asset_by_id( + project_name, + asset_id, + fields=["data.inputLinks"] + ) + if not asset_doc: + return [] + + link_ids = get_linked_asset_ids(project_name, asset_doc=asset_doc) + if not link_ids: + return [] + + return list(get_assets(project_name, asset_ids=link_ids, fields=fields)) + + +def get_linked_representation_id( + project_name, repre_doc=None, repre_id=None, link_type=None, max_depth=None +): + """Returns list of linked ids of particular type (if provided). + + One of representation document or representation id must be passed. + Note: + Representation links now works only from representation through version + back to representations. + + Args: + project_name (str): Name of project where look for links. 
+ repre_doc (Dict[str, Any]): Representation document. + repre_id (Union[ObjectId, str]): Representation id. + link_type (str): Type of link (e.g. 'reference', ...). + max_depth (int): Limit recursion level. Default: 0 + + Returns: + List[ObjectId] Linked representation ids. + """ + + if repre_doc: + repre_id = repre_doc["_id"] + + if repre_id: + repre_id = convert_id(repre_id) + + if not repre_id and not repre_doc: + return [] + + version_id = None + if repre_doc: + version_id = repre_doc.get("parent") + + if not version_id: + repre_doc = get_representation_by_id( + project_name, repre_id, fields=["parent"] + ) + version_id = repre_doc["parent"] + + if not version_id: + return [] + + version_doc = get_version_by_id( + project_name, version_id, fields=["type", "version_id"] + ) + if version_doc["type"] == "hero_version": + version_id = version_doc["version_id"] + + if max_depth is None: + max_depth = 0 + + match = { + "_id": version_id, + # Links are not stored to hero versions at this moment so filter + # is limited to just versions + "type": "version" + } + + graph_lookup = { + "from": project_name, + "startWith": "$data.inputLinks.id", + "connectFromField": "data.inputLinks.id", + "connectToField": "_id", + "as": "outputs_recursive", + "depthField": "depth" + } + if max_depth != 0: + # We offset by -1 since 0 basically means no recursion + # but the recursion only happens after the initial lookup + # for outputs. + graph_lookup["maxDepth"] = max_depth - 1 + + query_pipeline = [ + # Match + {"$match": match}, + # Recursive graph lookup for inputs + {"$graphLookup": graph_lookup} + ] + + conn = get_project_connection(project_name) + result = conn.aggregate(query_pipeline) + referenced_version_ids = _process_referenced_pipeline_result( + result, link_type + ) + if not referenced_version_ids: + return [] + + ref_ids = conn.distinct( + "_id", + filter={ + "parent": {"$in": list(referenced_version_ids)}, + "type": "representation" + } + ) + + return list(ref_ids) + + +def _process_referenced_pipeline_result(result, link_type): + """Filters result from pipeline for particular link_type. + + Pipeline cannot use link_type directly in a query. 
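+    Links of all types are therefore collected by the aggregation and the
+    filtering by 'link_type' happens here on the aggregated result.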
+ + Returns: + (list) + """ + + referenced_version_ids = set() + correctly_linked_ids = set() + for item in result: + input_links = item.get("data", {}).get("inputLinks") + if not input_links: + continue + + _filter_input_links( + input_links, + link_type, + correctly_linked_ids + ) + + # outputs_recursive in random order, sort by depth + outputs_recursive = item.get("outputs_recursive") + if not outputs_recursive: + continue + + for output in sorted(outputs_recursive, key=lambda o: o["depth"]): + output_links = output.get("data", {}).get("inputLinks") + if not output_links and output["type"] != "hero_version": + continue + + # Leaf + if output["_id"] not in correctly_linked_ids: + continue + + _filter_input_links( + output_links, + link_type, + correctly_linked_ids + ) + + referenced_version_ids.add(output["_id"]) + + return referenced_version_ids + + +def _filter_input_links(input_links, link_type, correctly_linked_ids): + if not input_links: # to handle hero versions + return + + for input_link in input_links: + if link_type and input_link["type"] != link_type: + continue + + link_id = input_link.get("id") or input_link.get("_id") + if link_id is not None: + correctly_linked_ids.add(link_id) diff --git a/openpype/client/mongo.py b/openpype/client/mongo/mongo.py similarity index 98% rename from openpype/client/mongo.py rename to openpype/client/mongo/mongo.py index 251041c028..ad85782996 100644 --- a/openpype/client/mongo.py +++ b/openpype/client/mongo/mongo.py @@ -11,6 +11,7 @@ from bson.json_util import ( CANONICAL_JSON_OPTIONS ) +from openpype import AYON_SERVER_ENABLED if sys.version_info[0] == 2: from urlparse import urlparse, parse_qs else: @@ -206,6 +207,8 @@ class OpenPypeMongoConnection: @classmethod def create_connection(cls, mongo_url, timeout=None, retry_attempts=None): + if AYON_SERVER_ENABLED: + raise RuntimeError("Created mongo connection in AYON mode") parsed = urlparse(mongo_url) # Force validation of scheme if parsed.scheme not in ["mongodb", "mongodb+srv"]: diff --git a/openpype/client/mongo/operations.py b/openpype/client/mongo/operations.py new file mode 100644 index 0000000000..3537aa4a3d --- /dev/null +++ b/openpype/client/mongo/operations.py @@ -0,0 +1,632 @@ +import re +import copy +import collections + +from bson.objectid import ObjectId +from pymongo import DeleteOne, InsertOne, UpdateOne + +from openpype.client.operations_base import ( + REMOVED_VALUE, + CreateOperation, + UpdateOperation, + DeleteOperation, + BaseOperationsSession +) +from .mongo import get_project_connection +from .entities import get_project + + +PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_" +PROJECT_NAME_REGEX = re.compile( + "^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS) +) + +CURRENT_PROJECT_SCHEMA = "openpype:project-3.0" +CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0" +CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0" +CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0" +CURRENT_VERSION_SCHEMA = "openpype:version-3.0" +CURRENT_HERO_VERSION_SCHEMA = "openpype:hero_version-1.0" +CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0" +CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0" +CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0" + + +def _create_or_convert_to_mongo_id(mongo_id): + if mongo_id is None: + return ObjectId() + return ObjectId(mongo_id) + + +def new_project_document( + project_name, project_code, config, data=None, entity_id=None +): + """Create skeleton data of project document. + + Args: + project_name (str): Name of project. Used as identifier of a project. 
+ project_code (str): Shorter version of projet without spaces and + special characters (in most of cases). Should be also considered + as unique name across projects. + config (Dic[str, Any]): Project config consist of roots, templates, + applications and other project Anatomy related data. + data (Dict[str, Any]): Project data with information about it's + attributes (e.g. 'fps' etc.) or integration specific keys. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of project document. + """ + + if data is None: + data = {} + + data["code"] = project_code + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "name": project_name, + "type": CURRENT_PROJECT_SCHEMA, + "entity_data": data, + "config": config + } + + +def new_asset_document( + name, project_id, parent_id, parents, data=None, entity_id=None +): + """Create skeleton data of asset document. + + Args: + name (str): Is considered as unique identifier of asset in project. + project_id (Union[str, ObjectId]): Id of project doument. + parent_id (Union[str, ObjectId]): Id of parent asset. + parents (List[str]): List of parent assets names. + data (Dict[str, Any]): Asset document data. Empty dictionary is used + if not passed. Value of 'parent_id' is used to fill 'visualParent'. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of asset document. + """ + + if data is None: + data = {} + if parent_id is not None: + parent_id = ObjectId(parent_id) + data["visualParent"] = parent_id + data["parents"] = parents + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "type": "asset", + "name": name, + "parent": ObjectId(project_id), + "data": data, + "schema": CURRENT_ASSET_DOC_SCHEMA + } + + +def new_subset_document(name, family, asset_id, data=None, entity_id=None): + """Create skeleton data of subset document. + + Args: + name (str): Is considered as unique identifier of subset under asset. + family (str): Subset's family. + asset_id (Union[str, ObjectId]): Id of parent asset. + data (Dict[str, Any]): Subset document data. Empty dictionary is used + if not passed. Value of 'family' is used to fill 'family'. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of subset document. + """ + + if data is None: + data = {} + data["family"] = family + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "schema": CURRENT_SUBSET_SCHEMA, + "type": "subset", + "name": name, + "data": data, + "parent": asset_id + } + + +def new_version_doc(version, subset_id, data=None, entity_id=None): + """Create skeleton data of version document. + + Args: + version (int): Is considered as unique identifier of version + under subset. + subset_id (Union[str, ObjectId]): Id of parent subset. + data (Dict[str, Any]): Version document data. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of version document. + """ + + if data is None: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "schema": CURRENT_VERSION_SCHEMA, + "type": "version", + "name": int(version), + "parent": subset_id, + "data": data + } + + +def new_hero_version_doc(version_id, subset_id, data=None, entity_id=None): + """Create skeleton data of hero version document. 
+
+    Args:
+        version_id (ObjectId): Id of the source version the hero version
+            points to.
+        subset_id (Union[str, ObjectId]): Id of parent subset.
+        data (Dict[str, Any]): Version document data.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+            created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of hero version document.
+    """
+
+    if data is None:
+        data = {}
+
+    return {
+        "_id": _create_or_convert_to_mongo_id(entity_id),
+        "schema": CURRENT_HERO_VERSION_SCHEMA,
+        "type": "hero_version",
+        "version_id": version_id,
+        "parent": subset_id,
+        "data": data
+    }
+
+
+def new_representation_doc(
+    name, version_id, context, data=None, entity_id=None
+):
+    """Create skeleton data of representation document.
+
+    Args:
+        name (str): Representation name. Is considered as unique identifier
+            of representation under version.
+        version_id (Union[str, ObjectId]): Id of parent version.
+        context (Dict[str, Any]): Representation context used to fill
+            templates or to query the representation.
+        data (Dict[str, Any]): Representation document data.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+            created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of representation document.
+    """
+
+    if data is None:
+        data = {}
+
+    return {
+        "_id": _create_or_convert_to_mongo_id(entity_id),
+        "schema": CURRENT_REPRESENTATION_SCHEMA,
+        "type": "representation",
+        "parent": version_id,
+        "name": name,
+        "data": data,
+
+        # Imprint shortcut to context for performance reasons.
+        "context": context
+    }
+
+
+def new_thumbnail_doc(data=None, entity_id=None):
+    """Create skeleton data of thumbnail document.
+
+    Args:
+        data (Dict[str, Any]): Thumbnail document data.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+            created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of thumbnail document.
+    """
+
+    if data is None:
+        data = {}
+
+    return {
+        "_id": _create_or_convert_to_mongo_id(entity_id),
+        "type": "thumbnail",
+        "schema": CURRENT_THUMBNAIL_SCHEMA,
+        "data": data
+    }
+
+
+def new_workfile_info_doc(
+    filename, asset_id, task_name, files, data=None, entity_id=None
+):
+    """Create skeleton data of workfile info document.
+
+    Workfile document is at this moment used primarily for artist notes.
+
+    Args:
+        filename (str): Filename of workfile.
+        asset_id (Union[str, ObjectId]): Id of asset under which the workfile
+            lives.
+        task_name (str): Task under which the workfile was created.
+        files (List[str]): List of rootless filepaths related to workfile.
+        data (Dict[str, Any]): Additional metadata.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+            created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of workfile info document.
+    """
+
+    if not data:
+        data = {}
+
+    return {
+        "_id": _create_or_convert_to_mongo_id(entity_id),
+        "type": "workfile",
+        "parent": ObjectId(asset_id),
+        "task_name": task_name,
+        "filename": filename,
+        "data": data,
+        "files": files
+    }
+
+
+def _prepare_update_data(old_doc, new_doc, replace):
+    changes = {}
+    for key, value in new_doc.items():
+        if key not in old_doc or value != old_doc[key]:
+            changes[key] = value
+
+    if replace:
+        for key in old_doc.keys():
+            if key not in new_doc:
+                changes[key] = REMOVED_VALUE
+    return changes
+
+
+def prepare_subset_update_data(old_doc, new_doc, replace=True):
+    """Compare two subset documents and prepare update data.
+
+    Based on compared values will create update data for
+    'MongoUpdateOperation'.
+
+    Empty output means that documents are identical.
+
+    Returns:
+        Dict[str, Any]: Changes between old and new document.
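+
+    Example:
+        A minimal usage sketch; 'session' is assumed to be an existing
+        'MongoOperationsSession' and the documents are hypothetical::
+
+            changes = prepare_subset_update_data(old_doc, new_doc)
+            if changes:
+                session.update_entity(
+                    project_name, "subset", old_doc["_id"], changes
+                )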
+ """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_version_update_data(old_doc, new_doc, replace=True): + """Compare two version documents and prepare update data. + + Based on compared values will create update data for + 'MongoUpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_hero_version_update_data(old_doc, new_doc, replace=True): + """Compare two hero version documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_representation_update_data(old_doc, new_doc, replace=True): + """Compare two representation documents and prepare update data. + + Based on compared values will create update data for + 'MongoUpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_workfile_info_update_data(old_doc, new_doc, replace=True): + """Compare two workfile info documents and prepare update data. + + Based on compared values will create update data for + 'MongoUpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +class MongoCreateOperation(CreateOperation): + """Operation to create an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + data (Dict[str, Any]): Data of entity that will be created. + """ + + operation_name = "create" + + def __init__(self, project_name, entity_type, data): + super(MongoCreateOperation, self).__init__( + project_name, entity_type, data + ) + + if "_id" not in self._data: + self._data["_id"] = ObjectId() + else: + self._data["_id"] = ObjectId(self._data["_id"]) + + @property + def entity_id(self): + return self._data["_id"] + + def to_mongo_operation(self): + return InsertOne(copy.deepcopy(self._data)) + + +class MongoUpdateOperation(UpdateOperation): + """Operation to update an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + entity_id (Union[str, ObjectId]): Identifier of an entity. + update_data (Dict[str, Any]): Key -> value changes that will be set in + database. If value is set to 'REMOVED_VALUE' the key will be + removed. Only first level of dictionary is checked (on purpose). 
+ """ + + operation_name = "update" + + def __init__(self, project_name, entity_type, entity_id, update_data): + super(MongoUpdateOperation, self).__init__( + project_name, entity_type, entity_id, update_data + ) + + self._entity_id = ObjectId(self._entity_id) + + def to_mongo_operation(self): + unset_data = {} + set_data = {} + for key, value in self._update_data.items(): + if value is REMOVED_VALUE: + unset_data[key] = None + else: + set_data[key] = value + + op_data = {} + if unset_data: + op_data["$unset"] = unset_data + if set_data: + op_data["$set"] = set_data + + if not op_data: + return None + + return UpdateOne( + {"_id": self.entity_id}, + op_data + ) + + +class MongoDeleteOperation(DeleteOperation): + """Operation to delete an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + entity_id (Union[str, ObjectId]): Entity id that will be removed. + """ + + operation_name = "delete" + + def __init__(self, project_name, entity_type, entity_id): + super(MongoDeleteOperation, self).__init__( + project_name, entity_type, entity_id + ) + + self._entity_id = ObjectId(self._entity_id) + + def to_mongo_operation(self): + return DeleteOne({"_id": self.entity_id}) + + +class MongoOperationsSession(BaseOperationsSession): + """Session storing operations that should happen in an order. + + At this moment does not handle anything special can be sonsidered as + stupid list of operations that will happen after each other. If creation + of same entity is there multiple times it's handled in any way and document + values are not validated. + + All operations must be related to single project. + + Args: + project_name (str): Project name to which are operations related. + """ + + def commit(self): + """Commit session operations.""" + + operations, self._operations = self._operations, [] + if not operations: + return + + operations_by_project = collections.defaultdict(list) + for operation in operations: + operations_by_project[operation.project_name].append(operation) + + for project_name, operations in operations_by_project.items(): + bulk_writes = [] + for operation in operations: + mongo_op = operation.to_mongo_operation() + if mongo_op is not None: + bulk_writes.append(mongo_op) + + if bulk_writes: + collection = get_project_connection(project_name) + collection.bulk_write(bulk_writes) + + def create_entity(self, project_name, entity_type, data): + """Fast access to 'MongoCreateOperation'. + + Returns: + MongoCreateOperation: Object of update operation. + """ + + operation = MongoCreateOperation(project_name, entity_type, data) + self.add(operation) + return operation + + def update_entity(self, project_name, entity_type, entity_id, update_data): + """Fast access to 'MongoUpdateOperation'. + + Returns: + MongoUpdateOperation: Object of update operation. + """ + + operation = MongoUpdateOperation( + project_name, entity_type, entity_id, update_data + ) + self.add(operation) + return operation + + def delete_entity(self, project_name, entity_type, entity_id): + """Fast access to 'MongoDeleteOperation'. + + Returns: + MongoDeleteOperation: Object of delete operation. + """ + + operation = MongoDeleteOperation(project_name, entity_type, entity_id) + self.add(operation) + return operation + + +def create_project( + project_name, + project_code, + library_project=False, +): + """Create project using OpenPype settings. 
+ + This project creation function is not validating project document on + creation. It is because project document is created blindly with only + minimum required information about project which is it's name, code, type + and schema. + + Entered project name must be unique and project must not exist yet. + + Note: + This function is here to be OP v4 ready but in v3 has more logic + to do. That's why inner imports are in the body. + + Args: + project_name(str): New project name. Should be unique. + project_code(str): Project's code should be unique too. + library_project(bool): Project is library project. + + Raises: + ValueError: When project name already exists in MongoDB. + + Returns: + dict: Created project document. + """ + + from openpype.settings import ProjectSettings, SaveWarningExc + from openpype.pipeline.schema import validate + + if get_project(project_name, fields=["name"]): + raise ValueError("Project with name \"{}\" already exists".format( + project_name + )) + + if not PROJECT_NAME_REGEX.match(project_name): + raise ValueError(( + "Project name \"{}\" contain invalid characters" + ).format(project_name)) + + project_doc = { + "type": "project", + "name": project_name, + "data": { + "code": project_code, + "library_project": library_project + }, + "schema": CURRENT_PROJECT_SCHEMA + } + + op_session = MongoOperationsSession() + # Insert document with basic data + create_op = op_session.create_entity( + project_name, project_doc["type"], project_doc + ) + op_session.commit() + + # Load ProjectSettings for the project and save it to store all attributes + # and Anatomy + try: + project_settings_entity = ProjectSettings(project_name) + project_settings_entity.save() + except SaveWarningExc as exc: + print(str(exc)) + except Exception: + op_session.delete_entity( + project_name, project_doc["type"], create_op.entity_id + ) + op_session.commit() + raise + + project_doc = get_project(project_name) + + try: + # Validate created project document + validate(project_doc) + except Exception: + # Remove project if is not valid + op_session.delete_entity( + project_name, project_doc["type"], create_op.entity_id + ) + op_session.commit() + raise + + return project_doc diff --git a/openpype/client/operations.py b/openpype/client/operations.py index e8c9d28636..8bc09dffd3 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -1,797 +1,24 @@ -import re -import uuid -import copy -import collections -from abc import ABCMeta, abstractmethod, abstractproperty +from openpype import AYON_SERVER_ENABLED -import six -from bson.objectid import ObjectId -from pymongo import DeleteOne, InsertOne, UpdateOne +from .operations_base import REMOVED_VALUE +if not AYON_SERVER_ENABLED: + from .mongo.operations import * + OperationsSession = MongoOperationsSession -from .mongo import get_project_connection -from .entities import get_project - -REMOVED_VALUE = object() - -PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_" -PROJECT_NAME_REGEX = re.compile( - "^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS) -) - -CURRENT_PROJECT_SCHEMA = "openpype:project-3.0" -CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0" -CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0" -CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0" -CURRENT_VERSION_SCHEMA = "openpype:version-3.0" -CURRENT_HERO_VERSION_SCHEMA = "openpype:hero_version-1.0" -CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0" -CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0" -CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0" - - -def 
_create_or_convert_to_mongo_id(mongo_id): - if mongo_id is None: - return ObjectId() - return ObjectId(mongo_id) - - -def new_project_document( - project_name, project_code, config, data=None, entity_id=None -): - """Create skeleton data of project document. - - Args: - project_name (str): Name of project. Used as identifier of a project. - project_code (str): Shorter version of projet without spaces and - special characters (in most of cases). Should be also considered - as unique name across projects. - config (Dic[str, Any]): Project config consist of roots, templates, - applications and other project Anatomy related data. - data (Dict[str, Any]): Project data with information about it's - attributes (e.g. 'fps' etc.) or integration specific keys. - entity_id (Union[str, ObjectId]): Predefined id of document. New id is - created if not passed. - - Returns: - Dict[str, Any]: Skeleton of project document. - """ - - if data is None: - data = {} - - data["code"] = project_code - - return { - "_id": _create_or_convert_to_mongo_id(entity_id), - "name": project_name, - "type": CURRENT_PROJECT_SCHEMA, - "entity_data": data, - "config": config - } - - -def new_asset_document( - name, project_id, parent_id, parents, data=None, entity_id=None -): - """Create skeleton data of asset document. - - Args: - name (str): Is considered as unique identifier of asset in project. - project_id (Union[str, ObjectId]): Id of project doument. - parent_id (Union[str, ObjectId]): Id of parent asset. - parents (List[str]): List of parent assets names. - data (Dict[str, Any]): Asset document data. Empty dictionary is used - if not passed. Value of 'parent_id' is used to fill 'visualParent'. - entity_id (Union[str, ObjectId]): Predefined id of document. New id is - created if not passed. - - Returns: - Dict[str, Any]: Skeleton of asset document. - """ - - if data is None: - data = {} - if parent_id is not None: - parent_id = ObjectId(parent_id) - data["visualParent"] = parent_id - data["parents"] = parents - - return { - "_id": _create_or_convert_to_mongo_id(entity_id), - "type": "asset", - "name": name, - "parent": ObjectId(project_id), - "data": data, - "schema": CURRENT_ASSET_DOC_SCHEMA - } - - -def new_subset_document(name, family, asset_id, data=None, entity_id=None): - """Create skeleton data of subset document. - - Args: - name (str): Is considered as unique identifier of subset under asset. - family (str): Subset's family. - asset_id (Union[str, ObjectId]): Id of parent asset. - data (Dict[str, Any]): Subset document data. Empty dictionary is used - if not passed. Value of 'family' is used to fill 'family'. - entity_id (Union[str, ObjectId]): Predefined id of document. New id is - created if not passed. - - Returns: - Dict[str, Any]: Skeleton of subset document. - """ - - if data is None: - data = {} - data["family"] = family - return { - "_id": _create_or_convert_to_mongo_id(entity_id), - "schema": CURRENT_SUBSET_SCHEMA, - "type": "subset", - "name": name, - "data": data, - "parent": asset_id - } - - -def new_version_doc(version, subset_id, data=None, entity_id=None): - """Create skeleton data of version document. - - Args: - version (int): Is considered as unique identifier of version - under subset. - subset_id (Union[str, ObjectId]): Id of parent subset. - data (Dict[str, Any]): Version document data. - entity_id (Union[str, ObjectId]): Predefined id of document. New id is - created if not passed. - - Returns: - Dict[str, Any]: Skeleton of version document. 
- """ - - if data is None: - data = {} - - return { - "_id": _create_or_convert_to_mongo_id(entity_id), - "schema": CURRENT_VERSION_SCHEMA, - "type": "version", - "name": int(version), - "parent": subset_id, - "data": data - } - - -def new_hero_version_doc(version_id, subset_id, data=None, entity_id=None): - """Create skeleton data of hero version document. - - Args: - version_id (ObjectId): Is considered as unique identifier of version - under subset. - subset_id (Union[str, ObjectId]): Id of parent subset. - data (Dict[str, Any]): Version document data. - entity_id (Union[str, ObjectId]): Predefined id of document. New id is - created if not passed. - - Returns: - Dict[str, Any]: Skeleton of version document. - """ - - if data is None: - data = {} - - return { - "_id": _create_or_convert_to_mongo_id(entity_id), - "schema": CURRENT_HERO_VERSION_SCHEMA, - "type": "hero_version", - "version_id": version_id, - "parent": subset_id, - "data": data - } - - -def new_representation_doc( - name, version_id, context, data=None, entity_id=None -): - """Create skeleton data of asset document. - - Args: - version (int): Is considered as unique identifier of version - under subset. - version_id (Union[str, ObjectId]): Id of parent version. - context (Dict[str, Any]): Representation context used for fill template - of to query. - data (Dict[str, Any]): Representation document data. - entity_id (Union[str, ObjectId]): Predefined id of document. New id is - created if not passed. - - Returns: - Dict[str, Any]: Skeleton of version document. - """ - - if data is None: - data = {} - - return { - "_id": _create_or_convert_to_mongo_id(entity_id), - "schema": CURRENT_REPRESENTATION_SCHEMA, - "type": "representation", - "parent": version_id, - "name": name, - "data": data, - # Imprint shortcut to context for performance reasons. - "context": context - } - - -def new_thumbnail_doc(data=None, entity_id=None): - """Create skeleton data of thumbnail document. - - Args: - data (Dict[str, Any]): Thumbnail document data. - entity_id (Union[str, ObjectId]): Predefined id of document. New id is - created if not passed. - - Returns: - Dict[str, Any]: Skeleton of thumbnail document. - """ - - if data is None: - data = {} - - return { - "_id": _create_or_convert_to_mongo_id(entity_id), - "type": "thumbnail", - "schema": CURRENT_THUMBNAIL_SCHEMA, - "data": data - } - - -def new_workfile_info_doc( - filename, asset_id, task_name, files, data=None, entity_id=None -): - """Create skeleton data of workfile info document. - - Workfile document is at this moment used primarily for artist notes. - - Args: - filename (str): Filename of workfile. - asset_id (Union[str, ObjectId]): Id of asset under which workfile live. - task_name (str): Task under which was workfile created. - files (List[str]): List of rootless filepaths related to workfile. - data (Dict[str, Any]): Additional metadata. - - Returns: - Dict[str, Any]: Skeleton of workfile info document. 
- """ - - if not data: - data = {} - - return { - "_id": _create_or_convert_to_mongo_id(entity_id), - "type": "workfile", - "parent": ObjectId(asset_id), - "task_name": task_name, - "filename": filename, - "data": data, - "files": files - } - - -def _prepare_update_data(old_doc, new_doc, replace): - changes = {} - for key, value in new_doc.items(): - if key not in old_doc or value != old_doc[key]: - changes[key] = value - - if replace: - for key in old_doc.keys(): - if key not in new_doc: - changes[key] = REMOVED_VALUE - return changes - - -def prepare_subset_update_data(old_doc, new_doc, replace=True): - """Compare two subset documents and prepare update data. - - Based on compared values will create update data for 'UpdateOperation'. - - Empty output means that documents are identical. - - Returns: - Dict[str, Any]: Changes between old and new document. - """ - - return _prepare_update_data(old_doc, new_doc, replace) - - -def prepare_version_update_data(old_doc, new_doc, replace=True): - """Compare two version documents and prepare update data. - - Based on compared values will create update data for 'UpdateOperation'. - - Empty output means that documents are identical. - - Returns: - Dict[str, Any]: Changes between old and new document. - """ - - return _prepare_update_data(old_doc, new_doc, replace) - - -def prepare_hero_version_update_data(old_doc, new_doc, replace=True): - """Compare two hero version documents and prepare update data. - - Based on compared values will create update data for 'UpdateOperation'. - - Empty output means that documents are identical. - - Returns: - Dict[str, Any]: Changes between old and new document. - """ - - return _prepare_update_data(old_doc, new_doc, replace) - - -def prepare_representation_update_data(old_doc, new_doc, replace=True): - """Compare two representation documents and prepare update data. - - Based on compared values will create update data for 'UpdateOperation'. - - Empty output means that documents are identical. - - Returns: - Dict[str, Any]: Changes between old and new document. - """ - - return _prepare_update_data(old_doc, new_doc, replace) - - -def prepare_workfile_info_update_data(old_doc, new_doc, replace=True): - """Compare two workfile info documents and prepare update data. - - Based on compared values will create update data for 'UpdateOperation'. - - Empty output means that documents are identical. - - Returns: - Dict[str, Any]: Changes between old and new document. - """ - - return _prepare_update_data(old_doc, new_doc, replace) - - -@six.add_metaclass(ABCMeta) -class AbstractOperation(object): - """Base operation class. - - Operation represent a call into database. The call can create, change or - remove data. - - Args: - project_name (str): On which project operation will happen. - entity_type (str): Type of entity on which change happens. - e.g. 'asset', 'representation' etc. 
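# Illustrative sketch (not part of the original patch) of what the
# update-data helpers above produce. With 'replace=True' (the default), keys
# missing from the new document are marked with 'REMOVED_VALUE' so a commit
# can unset them.
old_doc = {"name": "modelMain", "data": {"fps": 24}, "obsolete_key": 1}
new_doc = {"name": "modelMain", "data": {"fps": 25}}
changes = prepare_subset_update_data(old_doc, new_doc)
# Only the first dictionary level is compared, so the whole 'data' value is
# included while the unchanged 'name' is not:
#   changes == {"data": {"fps": 25}, "obsolete_key": REMOVED_VALUE}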
- """ - - def __init__(self, project_name, entity_type): - self._project_name = project_name - self._entity_type = entity_type - self._id = str(uuid.uuid4()) - - @property - def project_name(self): - return self._project_name - - @property - def id(self): - """Identifier of operation.""" - - return self._id - - @property - def entity_type(self): - return self._entity_type - - @abstractproperty - def operation_name(self): - """Stringified type of operation.""" - - pass - - @abstractmethod - def to_mongo_operation(self): - """Convert operation to Mongo batch operation.""" - - pass - - def to_data(self): - """Convert operation to data that can be converted to json or others. - - Warning: - Current state returns ObjectId objects which cannot be parsed by - json. - - Returns: - Dict[str, Any]: Description of operation. - """ - - return { - "id": self._id, - "entity_type": self.entity_type, - "project_name": self.project_name, - "operation": self.operation_name - } - - -class CreateOperation(AbstractOperation): - """Operation to create an entity. - - Args: - project_name (str): On which project operation will happen. - entity_type (str): Type of entity on which change happens. - e.g. 'asset', 'representation' etc. - data (Dict[str, Any]): Data of entity that will be created. - """ - - operation_name = "create" - - def __init__(self, project_name, entity_type, data): - super(CreateOperation, self).__init__(project_name, entity_type) - - if not data: - data = {} - else: - data = copy.deepcopy(dict(data)) - - if "_id" not in data: - data["_id"] = ObjectId() - else: - data["_id"] = ObjectId(data["_id"]) - - self._entity_id = data["_id"] - self._data = data - - def __setitem__(self, key, value): - self.set_value(key, value) - - def __getitem__(self, key): - return self.data[key] - - def set_value(self, key, value): - self.data[key] = value - - def get(self, key, *args, **kwargs): - return self.data.get(key, *args, **kwargs) - - @property - def entity_id(self): - return self._entity_id - - @property - def data(self): - return self._data - - def to_mongo_operation(self): - return InsertOne(copy.deepcopy(self._data)) - - def to_data(self): - output = super(CreateOperation, self).to_data() - output["data"] = copy.deepcopy(self.data) - return output - - -class UpdateOperation(AbstractOperation): - """Operation to update an entity. - - Args: - project_name (str): On which project operation will happen. - entity_type (str): Type of entity on which change happens. - e.g. 'asset', 'representation' etc. - entity_id (Union[str, ObjectId]): Identifier of an entity. - update_data (Dict[str, Any]): Key -> value changes that will be set in - database. If value is set to 'REMOVED_VALUE' the key will be - removed. Only first level of dictionary is checked (on purpose). 
- """ - - operation_name = "update" - - def __init__(self, project_name, entity_type, entity_id, update_data): - super(UpdateOperation, self).__init__(project_name, entity_type) - - self._entity_id = ObjectId(entity_id) - self._update_data = update_data - - @property - def entity_id(self): - return self._entity_id - - @property - def update_data(self): - return self._update_data - - def to_mongo_operation(self): - unset_data = {} - set_data = {} - for key, value in self._update_data.items(): - if value is REMOVED_VALUE: - unset_data[key] = None - else: - set_data[key] = value - - op_data = {} - if unset_data: - op_data["$unset"] = unset_data - if set_data: - op_data["$set"] = set_data - - if not op_data: - return None - - return UpdateOne( - {"_id": self.entity_id}, - op_data - ) - - def to_data(self): - changes = {} - for key, value in self._update_data.items(): - if value is REMOVED_VALUE: - value = None - changes[key] = value - - output = super(UpdateOperation, self).to_data() - output.update({ - "entity_id": self.entity_id, - "changes": changes - }) - return output - - -class DeleteOperation(AbstractOperation): - """Operation to delete an entity. - - Args: - project_name (str): On which project operation will happen. - entity_type (str): Type of entity on which change happens. - e.g. 'asset', 'representation' etc. - entity_id (Union[str, ObjectId]): Entity id that will be removed. - """ - - operation_name = "delete" - - def __init__(self, project_name, entity_type, entity_id): - super(DeleteOperation, self).__init__(project_name, entity_type) - - self._entity_id = ObjectId(entity_id) - - @property - def entity_id(self): - return self._entity_id - - def to_mongo_operation(self): - return DeleteOne({"_id": self.entity_id}) - - def to_data(self): - output = super(DeleteOperation, self).to_data() - output["entity_id"] = self.entity_id - return output - - -class OperationsSession(object): - """Session storing operations that should happen in an order. - - At this moment does not handle anything special can be sonsidered as - stupid list of operations that will happen after each other. If creation - of same entity is there multiple times it's handled in any way and document - values are not validated. - - All operations must be related to single project. - - Args: - project_name (str): Project name to which are operations related. - """ - - def __init__(self): - self._operations = [] - - def add(self, operation): - """Add operation to be processed. - - Args: - operation (BaseOperation): Operation that should be processed. - """ - if not isinstance( - operation, - (CreateOperation, UpdateOperation, DeleteOperation) - ): - raise TypeError("Expected Operation object got {}".format( - str(type(operation)) - )) - - self._operations.append(operation) - - def append(self, operation): - """Add operation to be processed. - - Args: - operation (BaseOperation): Operation that should be processed. - """ - - self.add(operation) - - def extend(self, operations): - """Add operations to be processed. - - Args: - operations (List[BaseOperation]): Operations that should be - processed. 
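# Illustrative sketch (not part of the original patch) of how
# 'UpdateOperation.to_mongo_operation' splits update data into '$set' and
# '$unset' payloads. 'entity_id' is an assumed, pre-existing id.
from bson.objectid import ObjectId

entity_id = ObjectId()
op = UpdateOperation(
    "demo_project", "subset", entity_id,
    {"data.fps": 25, "obsolete_key": REMOVED_VALUE}
)
mongo_op = op.to_mongo_operation()
# Equivalent to:
#   UpdateOne(
#       {"_id": entity_id},
#       {"$set": {"data.fps": 25}, "$unset": {"obsolete_key": None}}
#   )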
- """ - - for operation in operations: - self.add(operation) - - def remove(self, operation): - """Remove operation.""" - - self._operations.remove(operation) - - def clear(self): - """Clear all registered operations.""" - - self._operations = [] - - def to_data(self): - return [ - operation.to_data() - for operation in self._operations - ] - - def commit(self): - """Commit session operations.""" - - operations, self._operations = self._operations, [] - if not operations: - return - - operations_by_project = collections.defaultdict(list) - for operation in operations: - operations_by_project[operation.project_name].append(operation) - - for project_name, operations in operations_by_project.items(): - bulk_writes = [] - for operation in operations: - mongo_op = operation.to_mongo_operation() - if mongo_op is not None: - bulk_writes.append(mongo_op) - - if bulk_writes: - collection = get_project_connection(project_name) - collection.bulk_write(bulk_writes) - - def create_entity(self, project_name, entity_type, data): - """Fast access to 'CreateOperation'. - - Returns: - CreateOperation: Object of update operation. - """ - - operation = CreateOperation(project_name, entity_type, data) - self.add(operation) - return operation - - def update_entity(self, project_name, entity_type, entity_id, update_data): - """Fast access to 'UpdateOperation'. - - Returns: - UpdateOperation: Object of update operation. - """ - - operation = UpdateOperation( - project_name, entity_type, entity_id, update_data - ) - self.add(operation) - return operation - - def delete_entity(self, project_name, entity_type, entity_id): - """Fast access to 'DeleteOperation'. - - Returns: - DeleteOperation: Object of delete operation. - """ - - operation = DeleteOperation(project_name, entity_type, entity_id) - self.add(operation) - return operation - - -def create_project( - project_name, - project_code, - library_project=False, -): - """Create project using OpenPype settings. - - This project creation function is not validating project document on - creation. It is because project document is created blindly with only - minimum required information about project which is it's name, code, type - and schema. - - Entered project name must be unique and project must not exist yet. - - Note: - This function is here to be OP v4 ready but in v3 has more logic - to do. That's why inner imports are in the body. - - Args: - project_name(str): New project name. Should be unique. - project_code(str): Project's code should be unique too. - library_project(bool): Project is library project. - - Raises: - ValueError: When project name already exists in MongoDB. - - Returns: - dict: Created project document. 
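# Illustrative sketch (not part of the original patch) of typical
# 'OperationsSession' usage. Operations are grouped per project and sent as
# a single bulk write on commit. 'asset_doc' stands for a skeleton built by
# 'new_asset_document'.
session = OperationsSession()
create_op = session.create_entity("demo_project", "asset", asset_doc)
session.update_entity(
    "demo_project", "asset", create_op.entity_id, {"data.fps": 25}
)
session.commit()  # one 'bulk_write' call against the project collection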
- """ - - from openpype.settings import ProjectSettings, SaveWarningExc - from openpype.pipeline.schema import validate - - if get_project(project_name, fields=["name"]): - raise ValueError("Project with name \"{}\" already exists".format( - project_name - )) - - if not PROJECT_NAME_REGEX.match(project_name): - raise ValueError(( - "Project name \"{}\" contain invalid characters" - ).format(project_name)) - - project_doc = { - "type": "project", - "name": project_name, - "data": { - "code": project_code, - "library_project": library_project, - }, - "schema": CURRENT_PROJECT_SCHEMA - } - - op_session = OperationsSession() - # Insert document with basic data - create_op = op_session.create_entity( - project_name, project_doc["type"], project_doc +else: + from ayon_api.server_api import ( + PROJECT_NAME_ALLOWED_SYMBOLS, + PROJECT_NAME_REGEX, + ) + from .server.operations import * + from .mongo.operations import ( + CURRENT_PROJECT_SCHEMA, + CURRENT_PROJECT_CONFIG_SCHEMA, + CURRENT_ASSET_DOC_SCHEMA, + CURRENT_SUBSET_SCHEMA, + CURRENT_VERSION_SCHEMA, + CURRENT_HERO_VERSION_SCHEMA, + CURRENT_REPRESENTATION_SCHEMA, + CURRENT_WORKFILE_INFO_SCHEMA, + CURRENT_THUMBNAIL_SCHEMA ) - op_session.commit() - - # Load ProjectSettings for the project and save it to store all attributes - # and Anatomy - try: - project_settings_entity = ProjectSettings(project_name) - project_settings_entity.save() - except SaveWarningExc as exc: - print(str(exc)) - except Exception: - op_session.delete_entity( - project_name, project_doc["type"], create_op.entity_id - ) - op_session.commit() - raise - - project_doc = get_project(project_name) - - try: - # Validate created project document - validate(project_doc) - except Exception: - # Remove project if is not valid - op_session.delete_entity( - project_name, project_doc["type"], create_op.entity_id - ) - op_session.commit() - raise - - return project_doc diff --git a/openpype/client/operations_base.py b/openpype/client/operations_base.py new file mode 100644 index 0000000000..887b237b1c --- /dev/null +++ b/openpype/client/operations_base.py @@ -0,0 +1,289 @@ +import uuid +import copy +from abc import ABCMeta, abstractmethod, abstractproperty +import six + +REMOVED_VALUE = object() + + +@six.add_metaclass(ABCMeta) +class AbstractOperation(object): + """Base operation class. + + Operation represent a call into database. The call can create, change or + remove data. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + """ + + def __init__(self, project_name, entity_type): + self._project_name = project_name + self._entity_type = entity_type + self._id = str(uuid.uuid4()) + + @property + def project_name(self): + return self._project_name + + @property + def id(self): + """Identifier of operation.""" + + return self._id + + @property + def entity_type(self): + return self._entity_type + + @abstractproperty + def operation_name(self): + """Stringified type of operation.""" + + pass + + def to_data(self): + """Convert operation to data that can be converted to json or others. + + Warning: + Current state returns ObjectId objects which cannot be parsed by + json. + + Returns: + Dict[str, Any]: Description of operation. + """ + + return { + "id": self._id, + "entity_type": self.entity_type, + "project_name": self.project_name, + "operation": self.operation_name + } + + +class CreateOperation(AbstractOperation): + """Operation to create an entity. 
+
+    Args:
+        project_name (str): On which project operation will happen.
+        entity_type (str): Type of entity on which change happens.
+            e.g. 'asset', 'representation' etc.
+        data (Dict[str, Any]): Data of entity that will be created.
+    """
+
+    operation_name = "create"
+
+    def __init__(self, project_name, entity_type, data):
+        super(CreateOperation, self).__init__(project_name, entity_type)
+
+        if not data:
+            data = {}
+        else:
+            data = copy.deepcopy(dict(data))
+        self._data = data
+
+    def __setitem__(self, key, value):
+        self.set_value(key, value)
+
+    def __getitem__(self, key):
+        return self.data[key]
+
+    def set_value(self, key, value):
+        self.data[key] = value
+
+    def get(self, key, *args, **kwargs):
+        return self.data.get(key, *args, **kwargs)
+
+    @abstractproperty
+    def entity_id(self):
+        pass
+
+    @property
+    def data(self):
+        return self._data
+
+    def to_data(self):
+        output = super(CreateOperation, self).to_data()
+        output["data"] = copy.deepcopy(self.data)
+        return output
+
+
+class UpdateOperation(AbstractOperation):
+    """Operation to update an entity.
+
+    Args:
+        project_name (str): On which project operation will happen.
+        entity_type (str): Type of entity on which change happens.
+            e.g. 'asset', 'representation' etc.
+        entity_id (Union[str, ObjectId]): Identifier of an entity.
+        update_data (Dict[str, Any]): Key -> value changes that will be set in
+            database. If value is set to 'REMOVED_VALUE' the key will be
+            removed. Only first level of dictionary is checked (on purpose).
+    """
+
+    operation_name = "update"
+
+    def __init__(self, project_name, entity_type, entity_id, update_data):
+        super(UpdateOperation, self).__init__(project_name, entity_type)
+
+        self._entity_id = entity_id
+        self._update_data = update_data
+
+    @property
+    def entity_id(self):
+        return self._entity_id
+
+    @property
+    def update_data(self):
+        return self._update_data
+
+    def to_data(self):
+        changes = {}
+        for key, value in self._update_data.items():
+            if value is REMOVED_VALUE:
+                value = None
+            changes[key] = value
+
+        output = super(UpdateOperation, self).to_data()
+        output.update({
+            "entity_id": self.entity_id,
+            "changes": changes
+        })
+        return output
+
+
+class DeleteOperation(AbstractOperation):
+    """Operation to delete an entity.
+
+    Args:
+        project_name (str): On which project operation will happen.
+        entity_type (str): Type of entity on which change happens.
+            e.g. 'asset', 'representation' etc.
+        entity_id (Union[str, ObjectId]): Entity id that will be removed.
+    """
+
+    operation_name = "delete"
+
+    def __init__(self, project_name, entity_type, entity_id):
+        super(DeleteOperation, self).__init__(project_name, entity_type)
+
+        self._entity_id = entity_id
+
+    @property
+    def entity_id(self):
+        return self._entity_id
+
+    def to_data(self):
+        output = super(DeleteOperation, self).to_data()
+        output["entity_id"] = self.entity_id
+        return output
+
+
+class BaseOperationsSession(object):
+    """Session storing operations that should happen in an order.
+
+    At this moment the session does not handle anything special and can be
+    considered a plain list of operations that are processed one after
+    another. If creation of the same entity is added multiple times it is
+    not handled in any way and document values are not validated.
+    """
+
+    def __init__(self):
+        self._operations = []
+
+    def __len__(self):
+        return len(self._operations)
+
+    def add(self, operation):
+        """Add operation to be processed.
+
+        Args:
+            operation (BaseOperation): Operation that should be processed.
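# Illustrative sketch (not part of the original patch). 'BaseOperationsSession'
# leaves only 'commit' to backend-specific subclasses, so a hypothetical
# in-memory backend needs just a few lines:
class InMemoryOperationsSession(BaseOperationsSession):
    def __init__(self):
        super(InMemoryOperationsSession, self).__init__()
        self.committed = []

    def commit(self):
        # Pop pending operations and "persist" their serialized form.
        operations, self._operations = self._operations, []
        self.committed.extend(op.to_data() for op in operations)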
+ """ + if not isinstance( + operation, + (CreateOperation, UpdateOperation, DeleteOperation) + ): + raise TypeError("Expected Operation object got {}".format( + str(type(operation)) + )) + + self._operations.append(operation) + + def append(self, operation): + """Add operation to be processed. + + Args: + operation (BaseOperation): Operation that should be processed. + """ + + self.add(operation) + + def extend(self, operations): + """Add operations to be processed. + + Args: + operations (List[BaseOperation]): Operations that should be + processed. + """ + + for operation in operations: + self.add(operation) + + def remove(self, operation): + """Remove operation.""" + + self._operations.remove(operation) + + def clear(self): + """Clear all registered operations.""" + + self._operations = [] + + def to_data(self): + return [ + operation.to_data() + for operation in self._operations + ] + + @abstractmethod + def commit(self): + """Commit session operations.""" + pass + + def create_entity(self, project_name, entity_type, data): + """Fast access to 'CreateOperation'. + + Returns: + CreateOperation: Object of update operation. + """ + + operation = CreateOperation(project_name, entity_type, data) + self.add(operation) + return operation + + def update_entity(self, project_name, entity_type, entity_id, update_data): + """Fast access to 'UpdateOperation'. + + Returns: + UpdateOperation: Object of update operation. + """ + + operation = UpdateOperation( + project_name, entity_type, entity_id, update_data + ) + self.add(operation) + return operation + + def delete_entity(self, project_name, entity_type, entity_id): + """Fast access to 'DeleteOperation'. + + Returns: + DeleteOperation: Object of delete operation. + """ + + operation = DeleteOperation(project_name, entity_type, entity_id) + self.add(operation) + return operation diff --git a/openpype/client/server/__init__.py b/openpype/client/server/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/client/server/constants.py b/openpype/client/server/constants.py new file mode 100644 index 0000000000..7ff990dc60 --- /dev/null +++ b/openpype/client/server/constants.py @@ -0,0 +1,83 @@ +# --- Project --- +DEFAULT_PROJECT_FIELDS = { + "active", + "name", + "code", + "config", + "data", + "createdAt", +} + +# --- Folders --- +DEFAULT_FOLDER_FIELDS = { + "id", + "name", + "path", + "parentId", + "active", + "parents", + "thumbnailId" +} + +# --- Tasks --- +DEFAULT_TASK_FIELDS = { + "id", + "name", + "taskType", + "assignees", +} + +# --- Subsets --- +DEFAULT_SUBSET_FIELDS = { + "id", + "name", + "active", + "family", + "folderId", +} + +# --- Versions --- +DEFAULT_VERSION_FIELDS = { + "id", + "name", + "version", + "active", + "subsetId", + "taskId", + "author", + "thumbnailId", + "createdAt", + "updatedAt", +} + +# --- Representations --- +DEFAULT_REPRESENTATION_FIELDS = { + "id", + "name", + "context", + "createdAt", + "active", + "versionId", +} + +REPRESENTATION_FILES_FIELDS = { + "files.name", + "files.hash", + "files.id", + "files.path", + "files.size", +} + +DEFAULT_WORKFILE_INFO_FIELDS = { + "active", + "createdAt", + "createdBy", + "id", + "name", + "path", + "projectName", + "taskId", + "thumbnailId", + "updatedAt", + "updatedBy", +} diff --git a/openpype/client/server/conversion_utils.py b/openpype/client/server/conversion_utils.py new file mode 100644 index 0000000000..e8c2ee9c3c --- /dev/null +++ b/openpype/client/server/conversion_utils.py @@ -0,0 +1,1244 @@ +import os +import datetime +import collections 
+import json + +import six + +from openpype.client.operations_base import REMOVED_VALUE +from openpype.client.mongo.operations import ( + CURRENT_PROJECT_SCHEMA, + CURRENT_ASSET_DOC_SCHEMA, + CURRENT_SUBSET_SCHEMA, + CURRENT_VERSION_SCHEMA, + CURRENT_HERO_VERSION_SCHEMA, + CURRENT_REPRESENTATION_SCHEMA, + CURRENT_WORKFILE_INFO_SCHEMA, +) +from .constants import REPRESENTATION_FILES_FIELDS +from .utils import create_entity_id, prepare_entity_changes + +# --- Project entity --- +PROJECT_FIELDS_MAPPING_V3_V4 = { + "_id": {"name"}, + "name": {"name"}, + "data": {"data", "code"}, + "data.library_project": {"library"}, + "data.code": {"code"}, + "data.active": {"active"}, +} + +# TODO this should not be hardcoded but received from server!!! +# --- Folder entity --- +FOLDER_FIELDS_MAPPING_V3_V4 = { + "_id": {"id"}, + "name": {"name"}, + "label": {"label"}, + "data": { + "parentId", "parents", "active", "tasks", "thumbnailId" + }, + "data.visualParent": {"parentId"}, + "data.parents": {"parents"}, + "data.active": {"active"}, + "data.thumbnail_id": {"thumbnailId"}, + "data.entityType": {"folderType"} +} + +# --- Subset entity --- +SUBSET_FIELDS_MAPPING_V3_V4 = { + "_id": {"id"}, + "name": {"name"}, + "data.active": {"active"}, + "parent": {"folderId"} +} + +# --- Version entity --- +VERSION_FIELDS_MAPPING_V3_V4 = { + "_id": {"id"}, + "name": {"version"}, + "parent": {"subsetId"} +} + +# --- Representation entity --- +REPRESENTATION_FIELDS_MAPPING_V3_V4 = { + "_id": {"id"}, + "name": {"name"}, + "parent": {"versionId"}, + "context": {"context"}, + "files": {"files"}, +} + + +def project_fields_v3_to_v4(fields, con): + """Convert project fields from v3 to v4 structure. + + Args: + fields (Union[Iterable(str), None]): fields to be converted. + + Returns: + Union[Set(str), None]: Converted fields to v4 fields. 
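# Illustrative sketch (not part of the original patch) of the field
# conversion below, assuming the server reports 'fps' as a project attribute
# and 'con' is an existing 'ayon_api' server connection (e.g. from
# 'get_server_api_connection()').
fields = project_fields_v3_to_v4({"_id", "data.code", "data.fps"}, con)
# fields == {"name", "code", "attrib.fps"}
# ("name" is always added so converted documents keep their identifier.)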
+ """ + + # TODO config fields + # - config.apps + # - config.groups + if not fields: + return None + + project_attribs = con.get_attributes_for_type("project") + output = set() + for field in fields: + # If config is needed the rest api call must be used + if field.startswith("config"): + return None + + if field in PROJECT_FIELDS_MAPPING_V3_V4: + output |= PROJECT_FIELDS_MAPPING_V3_V4[field] + if field == "data": + output |= { + "attrib.{}".format(attr) + for attr in project_attribs + } + + elif field.startswith("data"): + field_parts = field.split(".") + field_parts.pop(0) + data_key = ".".join(field_parts) + if data_key in project_attribs: + output.add("attrib.{}".format(data_key)) + else: + output.add("data") + print("Requested specific key from data {}".format(data_key)) + + else: + raise ValueError("Unknown field mapping for {}".format(field)) + + if "name" not in output: + output.add("name") + return output + + +def _get_default_template_name(templates): + default_template = None + for name, template in templates.items(): + if name == "default": + return "default" + + if default_template is None: + default_template = template["name"] + + return default_template + + +def _convert_template_item(template): + template["folder"] = template.pop("directory") + template["path"] = "/".join( + (template["folder"], template["file"]) + ) + + +def _fill_template_category(templates, cat_templates, cat_key): + default_template_name = _get_default_template_name(cat_templates) + for template_name, cat_template in cat_templates.items(): + _convert_template_item(cat_template) + if template_name == default_template_name: + templates[cat_key] = cat_template + else: + new_name = "{}_{}".format(cat_key, template_name) + templates["others"][new_name] = cat_template + + +def convert_v4_project_to_v3(project): + """Convert Project entity data from v4 structure to v3 structure. + + Args: + project (Dict[str, Any]): Project entity queried from v4 server. + + Returns: + Dict[str, Any]: Project converted to v3 structure. 
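# Illustrative sketch (not part of the original patch) of the template item
# conversion above: 'directory' becomes 'folder' and 'path' is composed from
# folder and file.
template = {
    "directory": "{root[work]}/{project[name]}",
    "file": "{subset}_v{version:0>3}.{ext}",
}
_convert_template_item(template)
# template == {
#     "folder": "{root[work]}/{project[name]}",
#     "file": "{subset}_v{version:0>3}.{ext}",
#     "path": "{root[work]}/{project[name]}/{subset}_v{version:0>3}.{ext}",
# }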
+ """ + + if not project: + return project + + project_name = project["name"] + output = { + "_id": project_name, + "name": project_name, + "schema": CURRENT_PROJECT_SCHEMA, + "type": "project" + } + + data = project.get("data") or {} + attribs = project.get("attrib") or {} + apps_attr = attribs.pop("applications", None) or [] + applications = [ + {"name": app_name} + for app_name in apps_attr + ] + data.update(attribs) + data["entityType"] = "Project" + + config = {} + project_config = project.get("config") + + if project_config: + config["apps"] = applications + config["roots"] = project_config["roots"] + + templates = project_config["templates"] + templates["defaults"] = templates.pop("common", None) or {} + + others_templates = templates.pop("others", None) or {} + new_others_templates = {} + templates["others"] = new_others_templates + for name, template in others_templates.items(): + _convert_template_item(template) + new_others_templates[name] = template + + for key in ( + "work", + "publish", + "hero" + ): + cat_templates = templates.pop(key) + _fill_template_category(templates, cat_templates, key) + + delivery_templates = templates.pop("delivery", None) or {} + new_delivery_templates = {} + for name, delivery_template in delivery_templates.items(): + new_delivery_templates[name] = "/".join( + (delivery_template["directory"], delivery_template["file"]) + ) + templates["delivery"] = new_delivery_templates + + config["templates"] = templates + + if "taskTypes" in project: + task_types = project["taskTypes"] + new_task_types = {} + for task_type in task_types: + name = task_type.pop("name") + new_task_types[name] = task_type + + config["tasks"] = new_task_types + + if config: + output["config"] = config + + for data_key, key in ( + ("library_project", "library"), + ("code", "code"), + ("active", "active") + ): + if key in project: + data[data_key] = project[key] + + if "attrib" in project: + for key, value in project["attrib"].items(): + data[key] = value + + if data: + output["data"] = data + return output + + +def folder_fields_v3_to_v4(fields, con): + """Convert folder fields from v3 to v4 structure. + + Args: + fields (Union[Iterable(str), None]): fields to be converted. + + Returns: + Union[Set(str), None]: Converted fields to v4 fields. + """ + + if not fields: + return None + + folder_attributes = con.get_attributes_for_type("folder") + output = set() + for field in fields: + if field in ("schema", "type", "parent"): + continue + + if field in FOLDER_FIELDS_MAPPING_V3_V4: + output |= FOLDER_FIELDS_MAPPING_V3_V4[field] + if field == "data": + output |= { + "attrib.{}".format(attr) + for attr in folder_attributes + } + + elif field.startswith("data"): + field_parts = field.split(".") + field_parts.pop(0) + data_key = ".".join(field_parts) + if data_key == "label": + output.add("name") + + elif data_key in ("icon", "color"): + continue + + elif data_key.startswith("tasks"): + output.add("tasks") + + elif data_key in folder_attributes: + output.add("attrib.{}".format(data_key)) + + else: + output.add("data") + print("Requested specific key from data {}".format(data_key)) + + else: + raise ValueError("Unknown field mapping for {}".format(field)) + + if "id" not in output: + output.add("id") + return output + + +def convert_v4_tasks_to_v3(tasks): + """Convert v4 task item to v3 task. + + Args: + tasks (List[Dict[str, Any]]): Task entites. + + Returns: + Dict[str, Dict[str, Any]]: Tasks in v3 variant ready for v3 asset. 
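# Illustrative sketch (not part of the original patch) of the task conversion:
# the v4 task list collapses into the v3 'tasks' mapping stored on the asset.
tasks = convert_v4_tasks_to_v3([
    {"name": "modeling", "taskType": "Modeling"},
    {"name": "rigging", "taskType": "Rigging"},
])
# tasks == {"modeling": {"type": "Modeling"}, "rigging": {"type": "Rigging"}}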
+ """ + + output = {} + for task in tasks: + task_name = task["name"] + new_task = { + "type": task["taskType"] + } + output[task_name] = new_task + return output + + +def convert_v4_folder_to_v3(folder, project_name): + """Convert v4 folder to v3 asset. + + Args: + folder (Dict[str, Any]): Folder entity data. + project_name (str): Project name from which folder was queried. + + Returns: + Dict[str, Any]: Converted v4 folder to v3 asset. + """ + + output = { + "_id": folder["id"], + "parent": project_name, + "type": "asset", + "schema": CURRENT_ASSET_DOC_SCHEMA + } + + output_data = folder.get("data") or {} + + if "name" in folder: + output["name"] = folder["name"] + output_data["label"] = folder["name"] + + if "folderType" in folder: + output_data["entityType"] = folder["folderType"] + + for src_key, dst_key in ( + ("parentId", "visualParent"), + ("active", "active"), + ("thumbnailId", "thumbnail_id"), + ("parents", "parents"), + ): + if src_key in folder: + output_data[dst_key] = folder[src_key] + + if "attrib" in folder: + output_data.update(folder["attrib"]) + + if "tasks" in folder: + output_data["tasks"] = convert_v4_tasks_to_v3(folder["tasks"]) + + output["data"] = output_data + + return output + + +def subset_fields_v3_to_v4(fields, con): + """Convert subset fields from v3 to v4 structure. + + Args: + fields (Union[Iterable(str), None]): fields to be converted. + + Returns: + Union[Set(str), None]: Converted fields to v4 fields. + """ + + if not fields: + return None + + subset_attributes = con.get_attributes_for_type("subset") + + output = set() + for field in fields: + if field in ("schema", "type"): + continue + + if field in SUBSET_FIELDS_MAPPING_V3_V4: + output |= SUBSET_FIELDS_MAPPING_V3_V4[field] + + elif field == "data": + output.add("family") + output.add("active") + output |= { + "attrib.{}".format(attr) + for attr in subset_attributes + } + + elif field.startswith("data"): + field_parts = field.split(".") + field_parts.pop(0) + data_key = ".".join(field_parts) + if data_key in ("family", "families"): + output.add("family") + + elif data_key in subset_attributes: + output.add("attrib.{}".format(data_key)) + + else: + output.add("data") + print("Requested specific key from data {}".format(data_key)) + + else: + raise ValueError("Unknown field mapping for {}".format(field)) + + if "id" not in output: + output.add("id") + return output + + +def convert_v4_subset_to_v3(subset): + output = { + "_id": subset["id"], + "type": "subset", + "schema": CURRENT_SUBSET_SCHEMA + } + if "folderId" in subset: + output["parent"] = subset["folderId"] + + output_data = subset.get("data") or {} + + if "name" in subset: + output["name"] = subset["name"] + + if "active" in subset: + output_data["active"] = subset["active"] + + if "attrib" in subset: + attrib = subset["attrib"] + output_data.update(attrib) + + family = subset.get("family") + if family: + output_data["family"] = family + output_data["families"] = [family] + + output["data"] = output_data + + return output + + +def version_fields_v3_to_v4(fields, con): + """Convert version fields from v3 to v4 structure. + + Args: + fields (Union[Iterable(str), None]): fields to be converted. + + Returns: + Union[Set(str), None]: Converted fields to v4 fields. 
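# Illustrative sketch (not part of the original patch) of the version field
# conversion, assuming 'con' is an existing 'ayon_api' server connection and
# 'time'/'author' are not server-side version attributes.
fields = version_fields_v3_to_v4({"_id", "data.time", "data.author"}, con)
# fields == {"id", "createdAt", "author"}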
+    """
+
+    if not fields:
+        return None
+
+    version_attributes = con.get_attributes_for_type("version")
+
+    output = set()
+    for field in fields:
+        if field in ("type", "schema", "version_id"):
+            continue
+
+        if field in VERSION_FIELDS_MAPPING_V3_V4:
+            output |= VERSION_FIELDS_MAPPING_V3_V4[field]
+
+        elif field == "data":
+            output |= {
+                "attrib.{}".format(attr)
+                for attr in version_attributes
+            }
+            output |= {
+                "author",
+                "createdAt",
+                "thumbnailId",
+            }
+
+        elif field.startswith("data"):
+            field_parts = field.split(".")
+            field_parts.pop(0)
+            data_key = ".".join(field_parts)
+            if data_key in version_attributes:
+                output.add("attrib.{}".format(data_key))
+
+            elif data_key == "thumbnail_id":
+                output.add("thumbnailId")
+
+            elif data_key == "time":
+                output.add("createdAt")
+
+            elif data_key == "author":
+                output.add("author")
+
+            elif data_key in ("tags", ):
+                continue
+
+            else:
+                output.add("data")
+                print("Requested specific key from data {}".format(data_key))
+
+        else:
+            raise ValueError("Unknown field mapping for {}".format(field))
+
+    if "id" not in output:
+        output.add("id")
+    return output
+
+
+def convert_v4_version_to_v3(version):
+    """Convert v4 version entity to v3 version.
+
+    Args:
+        version (Dict[str, Any]): Queried v4 version entity.
+
+    Returns:
+        Dict[str, Any]: Converted version entity to v3 structure.
+    """
+
+    version_num = version["version"]
+    if version_num < 0:
+        output = {
+            "_id": version["id"],
+            "type": "hero_version",
+            "schema": CURRENT_HERO_VERSION_SCHEMA,
+        }
+        if "subsetId" in version:
+            output["parent"] = version["subsetId"]
+
+        if "data" in version:
+            output["data"] = version["data"]
+        return output
+
+    output = {
+        "_id": version["id"],
+        "type": "version",
+        "name": version_num,
+        "schema": CURRENT_VERSION_SCHEMA
+    }
+    if "subsetId" in version:
+        output["parent"] = version["subsetId"]
+
+    output_data = version.get("data") or {}
+    if "attrib" in version:
+        output_data.update(version["attrib"])
+
+    for src_key, dst_key in (
+        ("active", "active"),
+        ("thumbnailId", "thumbnail_id"),
+        ("author", "author")
+    ):
+        if src_key in version:
+            output_data[dst_key] = version[src_key]
+
+    if "createdAt" in version:
+        created_at = datetime.datetime.fromisoformat(version["createdAt"])
+        output_data["time"] = created_at.strftime("%Y%m%dT%H%M%SZ")
+
+    output["data"] = output_data
+
+    return output
+
+
+def representation_fields_v3_to_v4(fields, con):
+    """Convert representation fields from v3 to v4 structure.
+
+    Args:
+        fields (Union[Iterable(str), None]): fields to be converted.
+
+    Returns:
+        Union[Set(str), None]: Converted fields to v4 fields.
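# Illustrative sketch (not part of the original patch). In v4 a negative
# version number marks a hero version, which decides the v3 document type.
# The id strings are placeholders.
hero_doc = convert_v4_version_to_v3(
    {"id": "version-id", "version": -3, "subsetId": "subset-id"}
)
# hero_doc["type"] == "hero_version"
version_doc = convert_v4_version_to_v3(
    {"id": "version-id", "version": 3, "subsetId": "subset-id"}
)
# version_doc["type"] == "version" and version_doc["name"] == 3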
+    """
+
+    if not fields:
+        return None
+
+    representation_attributes = con.get_attributes_for_type("representation")
+
+    output = set()
+    for field in fields:
+        if field in ("type", "schema"):
+            continue
+
+        if field in REPRESENTATION_FIELDS_MAPPING_V3_V4:
+            output |= REPRESENTATION_FIELDS_MAPPING_V3_V4[field]
+
+        elif field.startswith("context"):
+            output.add("context")
+
+        # TODO: 'files' can have specific attributes but the keys in v3 and v4
+        #   are not the same (content is not the same)
+        elif field.startswith("files"):
+            output |= REPRESENTATION_FILES_FIELDS
+
+        elif field.startswith("data"):
+            output |= {
+                "attrib.{}".format(attr)
+                for attr in representation_attributes
+            }
+
+        else:
+            raise ValueError("Unknown field mapping for {}".format(field))
+
+    if "id" not in output:
+        output.add("id")
+    return output
+
+
+def convert_v4_representation_to_v3(representation):
+    """Convert v4 representation to v3 representation.
+
+    Args:
+        representation (Dict[str, Any]): Queried representation from v4 server.
+
+    Returns:
+        Dict[str, Any]: Converted representation to v3 structure.
+    """
+
+    output = {
+        "type": "representation",
+        "schema": CURRENT_REPRESENTATION_SCHEMA,
+    }
+    if "id" in representation:
+        output["_id"] = representation["id"]
+
+    for v3_key, v4_key in (
+        ("name", "name"),
+        ("parent", "versionId")
+    ):
+        if v4_key in representation:
+            output[v3_key] = representation[v4_key]
+
+    if "context" in representation:
+        context = representation["context"]
+        if isinstance(context, six.string_types):
+            context = json.loads(context)
+        output["context"] = context
+
+    if "files" in representation:
+        files = representation["files"]
+        new_files = []
+        # From GraphQl is list
+        if isinstance(files, list):
+            for file_info in files:
+                file_info["_id"] = file_info["id"]
+                new_files.append(file_info)
+
+        # From RestPoint is dictionary
+        elif isinstance(files, dict):
+            for file_id, file_info in files.items():
+                file_info["_id"] = file_id
+                new_files.append(file_info)
+
+        if not new_files:
+            new_files.append({
+                "name": "studio"
+            })
+        output["files"] = new_files
+
+    if representation.get("active") is False:
+        output["type"] = "archived_representation"
+        output["old_id"] = output["_id"]
+
+    output_data = representation.get("data") or {}
+    if "attrib" in representation:
+        output_data.update(representation["attrib"])
+
+    for key, data_key in (
+        ("active", "active"),
+    ):
+        if key in representation:
+            output_data[data_key] = representation[key]
+
+    output["data"] = output_data
+
+    return output
+
+
+def workfile_info_fields_v3_to_v4(fields):
+    if not fields:
+        return None
+
+    new_fields = set()
+    fields = set(fields)
+    for v3_key, v4_key in (
+        ("_id", "id"),
+        ("files", "path"),
+        ("filename", "name"),
+        ("data", "data"),
+    ):
+        if v3_key in fields:
+            new_fields.add(v4_key)
+
+    if "parent" in fields or "task_name" in fields:
+        new_fields.add("taskId")
+
+    return new_fields
+
+
+def convert_v4_workfile_info_to_v3(workfile_info, task):
+    output = {
+        "type": "workfile",
+        "schema": CURRENT_WORKFILE_INFO_SCHEMA,
+    }
+    if "id" in workfile_info:
+        output["_id"] = workfile_info["id"]
+
+    if "path" in workfile_info:
+        output["files"] = [workfile_info["path"]]
+
+    if "name" in workfile_info:
+        output["filename"] = workfile_info["name"]
+
+    if "taskId" in workfile_info:
+        output["task_name"] = task["name"]
+        output["parent"] = task["folderId"]
+
+    return output
+
+
+def convert_create_asset_to_v4(asset, project, con):
+    folder_attributes = con.get_attributes_for_type("folder")
+
+    asset_data = asset["data"]
+
+    parent_id = asset_data["visualParent"]
+
+    folder = {
+        "name": asset["name"],
+        "parentId": parent_id,
+    }
+    entity_id = asset.get("_id")
+    if entity_id:
+        folder["id"] = entity_id
+
+    attribs = {}
+    data = {}
+    for key, value in asset_data.items():
+        if key in (
+            "visualParent",
+            "thumbnail_id",
+            "parents",
+            "inputLinks",
+            "avalon_mongo_id",
+        ):
+            continue
+
+        if key not in folder_attributes:
+            data[key] = value
+        elif value is not None:
+            attribs[key] = value
+
+    if attribs:
+        folder["attrib"] = attribs
+
+    if data:
+        folder["data"] = data
+    return folder
+
+
+def convert_create_task_to_v4(task, project, con):
+    if not project["taskTypes"]:
+        raise ValueError(
+            "Project \"{}\" does not have any task types".format(
+                project["name"]))
+
+    task_type = task["type"]
+    if task_type not in project["taskTypes"]:
+        task_type = tuple(project["taskTypes"].keys())[0]
+
+    return {
+        "name": task["name"],
+        "taskType": task_type,
+        "folderId": task["folderId"]
+    }
+
+
+def convert_create_subset_to_v4(subset, con):
+    subset_attributes = con.get_attributes_for_type("subset")
+
+    subset_data = subset["data"]
+    family = subset_data.get("family")
+    if not family:
+        family = subset_data["families"][0]
+
+    converted_subset = {
+        "name": subset["name"],
+        "family": family,
+        "folderId": subset["parent"],
+    }
+    entity_id = subset.get("_id")
+    if entity_id:
+        converted_subset["id"] = entity_id
+
+    attribs = {}
+    data = {}
+    for key, value in subset_data.items():
+        if key not in subset_attributes:
+            data[key] = value
+        elif value is not None:
+            attribs[key] = value
+
+    if attribs:
+        converted_subset["attrib"] = attribs
+
+    if data:
+        converted_subset["data"] = data
+
+    return converted_subset
+
+
+def convert_create_version_to_v4(version, con):
+    version_attributes = con.get_attributes_for_type("version")
+    converted_version = {
+        "version": version["name"],
+        "subsetId": version["parent"],
+    }
+    entity_id = version.get("_id")
+    if entity_id:
+        converted_version["id"] = entity_id
+
+    version_data = version["data"]
+    attribs = {}
+    data = {}
+    for key, value in version_data.items():
+        if key not in version_attributes:
+            data[key] = value
+        elif value is not None:
+            attribs[key] = value
+
+    if attribs:
+        converted_version["attrib"] = attribs
+
+    if data:
+        converted_version["data"] = data
+
+    return converted_version
+
+
+def convert_create_hero_version_to_v4(hero_version, project_name, con):
+    if "version_id" in hero_version:
+        version_id = hero_version["version_id"]
+        version = con.get_version_by_id(project_name, version_id)
+        version["version"] = -version["version"]
+
+        for auto_key in (
+            "name",
+            "createdAt",
+            "updatedAt",
+            "author",
+        ):
+            version.pop(auto_key, None)
+
+        return version
+
+    version_attributes = con.get_attributes_for_type("version")
+    converted_version = {
+        "version": hero_version["version"],
+        "subsetId": hero_version["parent"],
+    }
+    entity_id = hero_version.get("_id")
+    if entity_id:
+        converted_version["id"] = entity_id
+
+    version_data = hero_version["data"]
+    attribs = {}
+    data = {}
+    for key, value in version_data.items():
+        if key not in version_attributes:
+            data[key] = value
+        elif value is not None:
+            attribs[key] = value
+
+    if attribs:
+        converted_version["attrib"] = attribs
+
+    if data:
+        converted_version["data"] = data
+
+    return converted_version
+
+
+def convert_create_representation_to_v4(representation, con):
+    representation_attributes = con.get_attributes_for_type("representation")
+
+    converted_representation = {
+        "name": representation["name"],
"versionId": representation["parent"], + } + entity_id = representation.get("_id") + if entity_id: + converted_representation["id"] = entity_id + + if representation.get("type") == "archived_representation": + converted_representation["active"] = False + + new_files = {} + for file_item in representation["files"]: + new_file_item = { + key: value + for key, value in file_item.items() + if key != "_id" + } + file_item_id = create_entity_id() + new_files[file_item_id] = new_file_item + + attribs = {} + data = { + "files": new_files, + "context": representation["context"] + } + + representation_data = representation["data"] + + for key, value in representation_data.items(): + if key not in representation_attributes: + data[key] = value + elif value is not None: + attribs[key] = value + + if attribs: + converted_representation["attrib"] = attribs + + if data: + converted_representation["data"] = data + + return converted_representation + + +def convert_create_workfile_info_to_v4(data, project_name, con): + folder_id = data["parent"] + task_name = data["task_name"] + task = con.get_task_by_name(project_name, folder_id, task_name) + if not task: + return None + + workfile_attributes = con.get_attributes_for_type("workfile") + filename = data["filename"] + possible_attribs = { + "extension": os.path.splitext(filename)[-1] + } + attribs = {} + for attr in workfile_attributes: + if attr in possible_attribs: + attribs[attr] = possible_attribs[attr] + + output = { + "path": data["files"][0], + "name": filename, + "taskId": task["id"] + } + if "_id" in data: + output["id"] = data["_id"] + + if attribs: + output["attrib"] = attribs + + output_data = data.get("data") + if output_data: + output["data"] = output_data + return output + + +def _from_flat_dict(data): + output = {} + for key, value in data.items(): + output_value = output + subkeys = key.split(".") + last_key = subkeys.pop(-1) + for subkey in subkeys: + if subkey not in output_value: + output_value[subkey] = {} + output_value = output_value[subkey] + + output_value[last_key] = value + return output + + +def _to_flat_dict(data): + output = {} + flat_queue = collections.deque() + flat_queue.append(([], data)) + while flat_queue: + item = flat_queue.popleft() + parent_keys, data = item + for key, value in data.items(): + keys = list(parent_keys) + keys.append(key) + if isinstance(value, dict): + flat_queue.append((keys, value)) + else: + full_key = ".".join(keys) + output[full_key] = value + + return output + + +def convert_update_folder_to_v4(project_name, asset_id, update_data, con): + new_update_data = {} + + folder_attributes = con.get_attributes_for_type("folder") + full_update_data = _from_flat_dict(update_data) + data = full_update_data.get("data") + + has_new_parent = False + has_task_changes = False + parent_id = None + tasks = None + new_data = {} + attribs = {} + if "type" in update_data: + new_update_data["active"] = update_data["type"] == "asset" + + if data: + if "thumbnail_id" in data: + new_update_data["thumbnailId"] = data.pop("thumbnail_id") + + if "tasks" in data: + tasks = data.pop("tasks") + has_task_changes = True + + if "visualParent" in data: + has_new_parent = True + parent_id = data.pop("visualParent") + + for key, value in data.items(): + if key in folder_attributes: + attribs[key] = value + else: + new_data[key] = value + + if "name" in update_data: + new_update_data["name"] = update_data["name"] + + if "type" in update_data: + new_type = update_data["type"] + if new_type == "asset": + new_update_data["active"] = True 
+ elif new_type == "archived_asset": + new_update_data["active"] = False + + if has_new_parent: + new_update_data["parentId"] = parent_id + + if new_data: + print("Folder has new data: {}".format(new_data)) + new_update_data["data"] = new_data + + if has_task_changes: + raise ValueError("Task changes of folder are not implemented") + + return _to_flat_dict(new_update_data) + + +def convert_update_subset_to_v4(project_name, subset_id, update_data, con): + new_update_data = {} + + subset_attributes = con.get_attributes_for_type("subset") + full_update_data = _from_flat_dict(update_data) + data = full_update_data.get("data") + new_data = {} + attribs = {} + if data: + if "family" in data: + family = data.pop("family") + new_update_data["family"] = family + + if "families" in data: + families = data.pop("families") + if "family" not in new_update_data: + new_update_data["family"] = families[0] + + for key, value in data.items(): + if key in subset_attributes: + if value is REMOVED_VALUE: + value = None + attribs[key] = value + + elif value is not REMOVED_VALUE: + new_data[key] = value + + if attribs: + new_update_data["attribs"] = attribs + + if "name" in update_data: + new_update_data["name"] = update_data["name"] + + if "type" in update_data: + new_type = update_data["type"] + if new_type == "subset": + new_update_data["active"] = True + elif new_type == "archived_subset": + new_update_data["active"] = False + + if "parent" in update_data: + new_update_data["folderId"] = update_data["parent"] + + flat_data = _to_flat_dict(new_update_data) + if new_data: + print("Subset has new data: {}".format(new_data)) + flat_data["data"] = new_data + + return flat_data + + +def convert_update_version_to_v4(project_name, version_id, update_data, con): + new_update_data = {} + + version_attributes = con.get_attributes_for_type("version") + full_update_data = _from_flat_dict(update_data) + data = full_update_data.get("data") + new_data = {} + attribs = {} + if data: + if "author" in data: + new_update_data["author"] = data.pop("author") + + if "thumbnail_id" in data: + new_update_data["thumbnailId"] = data.pop("thumbnail_id") + + for key, value in data.items(): + if key in version_attributes: + if value is REMOVED_VALUE: + value = None + attribs[key] = value + + elif value is not REMOVED_VALUE: + new_data[key] = value + + if attribs: + new_update_data["attribs"] = attribs + + if "name" in update_data: + new_update_data["version"] = update_data["name"] + + if "type" in update_data: + new_type = update_data["type"] + if new_type == "version": + new_update_data["active"] = True + elif new_type == "archived_version": + new_update_data["active"] = False + + if "parent" in update_data: + new_update_data["subsetId"] = update_data["parent"] + + flat_data = _to_flat_dict(new_update_data) + if new_data: + print("Version has new data: {}".format(new_data)) + flat_data["data"] = new_data + return flat_data + + +def convert_update_hero_version_to_v4( + project_name, hero_version_id, update_data, con +): + if "version_id" not in update_data: + return None + + version_id = update_data["version_id"] + hero_version = con.get_hero_version_by_id(project_name, hero_version_id) + version = con.get_version_by_id(project_name, version_id) + version["version"] = - version["version"] + version["id"] = hero_version_id + + for auto_key in ( + "name", + "createdAt", + "updatedAt", + "author", + ): + version.pop(auto_key, None) + + return prepare_entity_changes(hero_version, version) + + +def convert_update_representation_to_v4( + 
    project_name, repre_id, update_data, con
+):
+    new_update_data = {}
+
+    representation_attributes = con.get_attributes_for_type("representation")
+    full_update_data = _from_flat_dict(update_data)
+    data = full_update_data.get("data")
+
+    new_data = {}
+    attribs = {}
+    if data:
+        for key, value in data.items():
+            if key in representation_attributes:
+                attribs[key] = value
+            else:
+                new_data[key] = value
+
+    if attribs:
+        new_update_data["attribs"] = attribs
+
+    if "name" in update_data:
+        new_update_data["name"] = update_data["name"]
+
+    if "type" in update_data:
+        new_type = update_data["type"]
+        if new_type == "representation":
+            new_update_data["active"] = True
+        elif new_type == "archived_representation":
+            new_update_data["active"] = False
+
+    if "parent" in update_data:
+        new_update_data["versionId"] = update_data["parent"]
+
+    if "context" in update_data:
+        new_data["context"] = update_data["context"]
+
+    if "files" in update_data:
+        new_files = update_data["files"]
+        if isinstance(new_files, list):
+            _new_files = {}
+            for file_item in new_files:
+                _file_item = {
+                    key: value
+                    for key, value in file_item.items()
+                    if key != "_id"
+                }
+                file_item_id = create_entity_id()
+                _new_files[file_item_id] = _file_item
+            new_files = _new_files
+        new_data["files"] = new_files
+
+    flat_data = _to_flat_dict(new_update_data)
+    if new_data:
+        print("Representation has new data: {}".format(new_data))
+        flat_data["data"] = new_data
+
+    return flat_data
+
+
+def convert_update_workfile_info_to_v4(update_data):
+    return {
+        key: value
+        for key, value in update_data.items()
+        if key.startswith("data")
+    }
diff --git a/openpype/client/server/entities.py b/openpype/client/server/entities.py
new file mode 100644
index 0000000000..5dc8af9a6d
--- /dev/null
+++ b/openpype/client/server/entities.py
@@ -0,0 +1,655 @@
+import collections
+
+from ayon_api import get_server_api_connection
+
+from openpype.client.mongo.operations import CURRENT_THUMBNAIL_SCHEMA
+
+from .openpype_comp import get_folders_with_tasks
+from .conversion_utils import (
+    project_fields_v3_to_v4,
+    convert_v4_project_to_v3,
+
+    folder_fields_v3_to_v4,
+    convert_v4_folder_to_v3,
+
+    subset_fields_v3_to_v4,
+    convert_v4_subset_to_v3,
+
+    version_fields_v3_to_v4,
+    convert_v4_version_to_v3,
+
+    representation_fields_v3_to_v4,
+    convert_v4_representation_to_v3,
+
+    workfile_info_fields_v3_to_v4,
+    convert_v4_workfile_info_to_v3,
+)
+
+
+def get_projects(active=True, inactive=False, library=None, fields=None):
+    if not active and not inactive:
+        return
+
+    if active and inactive:
+        active = None
+    elif active:
+        active = True
+    elif inactive:
+        active = False
+
+    con = get_server_api_connection()
+    fields = project_fields_v3_to_v4(fields, con)
+    for project in con.get_projects(active, library, fields=fields):
+        yield convert_v4_project_to_v3(project)
+
+
+def get_project(project_name, active=True, inactive=False, fields=None):
+    # 'active' and 'inactive' are kept for v3 compatibility but are not
+    #   used for filtering here
+    con = get_server_api_connection()
+    fields = project_fields_v3_to_v4(fields, con)
+    return convert_v4_project_to_v3(
+        con.get_project(project_name, fields=fields)
+    )
+
+
+def get_whole_project(*args, **kwargs):
+    raise NotImplementedError("'get_whole_project' not implemented")
+
+
+def _get_subsets(
+    project_name,
+    subset_ids=None,
+    subset_names=None,
+    folder_ids=None,
+    names_by_folder_ids=None,
+    archived=False,
+    fields=None
+):
+    # Convert fields and add minimum required fields
+    con = get_server_api_connection()
+    fields = subset_fields_v3_to_v4(fields, con)
+    if fields is not None:
+        for key in (
+            "id",
+            "active"
+        ):
+            fields.add(key)
+
+    active = None
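# Illustrative sketch (not part of the original patch). The v3 wrappers in
# this module translate 'archived'/'inactive' flags into the single v4
# 'active' filter: 'True' and 'False' filter, 'None' disables filtering.
for project_doc in get_projects(active=True, inactive=True):
    # Both flags enabled -> 'active=None' is sent, so every project
    # (active and inactive) is yielded in v3 structure.
    print(project_doc["name"])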
+ if archived: + active = False + + for subset in con.get_subsets( + project_name, + subset_ids, + subset_names, + folder_ids, + names_by_folder_ids, + active, + fields + ): + yield convert_v4_subset_to_v3(subset) + + +def _get_versions( + project_name, + version_ids=None, + subset_ids=None, + versions=None, + hero=True, + standard=True, + latest=None, + fields=None +): + con = get_server_api_connection() + + fields = version_fields_v3_to_v4(fields, con) + + # Make sure 'subsetId' and 'version' are available when hero versions + # are queried + if fields and hero: + fields = set(fields) + fields |= {"subsetId", "version"} + + queried_versions = con.get_versions( + project_name, + version_ids, + subset_ids, + versions, + hero, + standard, + latest, + fields=fields + ) + + versions = [] + hero_versions = [] + for version in queried_versions: + if version["version"] < 0: + hero_versions.append(version) + else: + versions.append(convert_v4_version_to_v3(version)) + + if hero_versions: + subset_ids = set() + versions_nums = set() + for hero_version in hero_versions: + versions_nums.add(abs(hero_version["version"])) + subset_ids.add(hero_version["subsetId"]) + + hero_eq_versions = con.get_versions( + project_name, + subset_ids=subset_ids, + versions=versions_nums, + hero=False, + fields=["id", "version", "subsetId"] + ) + hero_eq_by_subset_id = collections.defaultdict(list) + for version in hero_eq_versions: + hero_eq_by_subset_id[version["subsetId"]].append(version) + + for hero_version in hero_versions: + abs_version = abs(hero_version["version"]) + subset_id = hero_version["subsetId"] + version_id = None + for version in hero_eq_by_subset_id.get(subset_id, []): + if version["version"] == abs_version: + version_id = version["id"] + break + conv_hero = convert_v4_version_to_v3(hero_version) + conv_hero["version_id"] = version_id + versions.append(conv_hero) + + return versions + + +def get_asset_by_id(project_name, asset_id, fields=None): + assets = get_assets( + project_name, asset_ids=[asset_id], fields=fields + ) + for asset in assets: + return asset + return None + + +def get_asset_by_name(project_name, asset_name, fields=None): + assets = get_assets( + project_name, asset_names=[asset_name], fields=fields + ) + for asset in assets: + return asset + return None + + +def get_assets( + project_name, + asset_ids=None, + asset_names=None, + parent_ids=None, + archived=False, + fields=None +): + if not project_name: + return + + active = True + if archived: + active = False + + con = get_server_api_connection() + fields = folder_fields_v3_to_v4(fields, con) + kwargs = dict( + folder_ids=asset_ids, + folder_names=asset_names, + parent_ids=parent_ids, + active=active, + fields=fields + ) + + if fields is None or "tasks" in fields: + folders = get_folders_with_tasks(con, project_name, **kwargs) + + else: + folders = con.get_folders(project_name, **kwargs) + + for folder in folders: + yield convert_v4_folder_to_v3(folder, project_name) + + +def get_archived_assets(*args, **kwargs): + raise NotImplementedError("'get_archived_assets' not implemented") + + +def get_asset_ids_with_subsets(project_name, asset_ids=None): + con = get_server_api_connection() + return con.get_asset_ids_with_subsets(project_name, asset_ids) + + +def get_subset_by_id(project_name, subset_id, fields=None): + subsets = get_subsets( + project_name, subset_ids=[subset_id], fields=fields + ) + for subset in subsets: + return subset + return None + + +def get_subset_by_name(project_name, subset_name, asset_id, fields=None): + 
    subsets = get_subsets(
+        project_name,
+        subset_names=[subset_name],
+        asset_ids=[asset_id],
+        fields=fields
+    )
+    for subset in subsets:
+        return subset
+    return None
+
+
+def get_subsets(
+    project_name,
+    subset_ids=None,
+    subset_names=None,
+    asset_ids=None,
+    names_by_asset_ids=None,
+    archived=False,
+    fields=None
+):
+    return _get_subsets(
+        project_name,
+        subset_ids,
+        subset_names,
+        asset_ids,
+        names_by_asset_ids,
+        archived,
+        fields=fields
+    )
+
+
+def get_subset_families(project_name, subset_ids=None):
+    con = get_server_api_connection()
+    return con.get_subset_families(project_name, subset_ids)
+
+
+def get_version_by_id(project_name, version_id, fields=None):
+    versions = get_versions(
+        project_name,
+        version_ids=[version_id],
+        fields=fields,
+        hero=True
+    )
+    for version in versions:
+        return version
+    return None
+
+
+def get_version_by_name(project_name, version, subset_id, fields=None):
+    versions = get_versions(
+        project_name,
+        subset_ids=[subset_id],
+        versions=[version],
+        fields=fields
+    )
+    for version in versions:
+        return version
+    return None
+
+
+def get_versions(
+    project_name,
+    version_ids=None,
+    subset_ids=None,
+    versions=None,
+    hero=False,
+    fields=None
+):
+    return _get_versions(
+        project_name,
+        version_ids,
+        subset_ids,
+        versions,
+        hero=hero,
+        standard=True,
+        fields=fields
+    )
+
+
+def get_hero_version_by_id(project_name, version_id, fields=None):
+    versions = get_hero_versions(
+        project_name,
+        version_ids=[version_id],
+        fields=fields
+    )
+    for version in versions:
+        return version
+    return None
+
+
+def get_hero_version_by_subset_id(
+    project_name, subset_id, fields=None
+):
+    versions = get_hero_versions(
+        project_name,
+        subset_ids=[subset_id],
+        fields=fields
+    )
+    for version in versions:
+        return version
+    return None
+
+
+def get_hero_versions(
+    project_name, subset_ids=None, version_ids=None, fields=None
+):
+    return _get_versions(
+        project_name,
+        version_ids=version_ids,
+        subset_ids=subset_ids,
+        hero=True,
+        standard=False,
+        fields=fields
+    )
+
+
+def get_last_versions(project_name, subset_ids, fields=None):
+    if fields:
+        fields = set(fields)
+        fields.add("parent")
+
+    versions = _get_versions(
+        project_name,
+        subset_ids=subset_ids,
+        latest=True,
+        hero=False,
+        fields=fields
+    )
+    return {
+        version["parent"]: version
+        for version in versions
+    }
+
+
+def get_last_version_by_subset_id(project_name, subset_id, fields=None):
+    versions = _get_versions(
+        project_name,
+        subset_ids=[subset_id],
+        latest=True,
+        hero=False,
+        fields=fields
+    )
+    if versions:
+        return versions[0]
+    return None
+
+
+def get_last_version_by_subset_name(
+    project_name,
+    subset_name,
+    asset_id=None,
+    asset_name=None,
+    fields=None
+):
+    if not asset_id and not asset_name:
+        return None
+
+    if not asset_id:
+        asset = get_asset_by_name(
+            project_name, asset_name, fields=["_id"]
+        )
+        if not asset:
+            return None
+        asset_id = asset["_id"]
+
+    subset = get_subset_by_name(
+        project_name, subset_name, asset_id, fields=["_id"]
+    )
+    if not subset:
+        return None
+    return get_last_version_by_subset_id(
+        project_name, subset["_id"], fields=fields
+    )
+
+
+def get_output_link_versions(*args, **kwargs):
+    raise NotImplementedError("'get_output_link_versions' not implemented")
+
+
+def version_is_latest(project_name, version_id):
+    con = get_server_api_connection()
+    return con.version_is_latest(project_name, version_id)
+
+
+def get_representation_by_id(project_name, representation_id, fields=None):
+    representations = get_representations(
+def get_representation_by_id(project_name, representation_id, fields=None):
+    representations = get_representations(
+        project_name,
+        representation_ids=[representation_id],
+        fields=fields
+    )
+    for representation in representations:
+        return representation
+    return None
+
+
+def get_representation_by_name(
+    project_name, representation_name, version_id, fields=None
+):
+    representations = get_representations(
+        project_name,
+        representation_names=[representation_name],
+        version_ids=[version_id],
+        fields=fields
+    )
+    for representation in representations:
+        return representation
+    return None
+
+
+def get_representations(
+    project_name,
+    representation_ids=None,
+    representation_names=None,
+    version_ids=None,
+    context_filters=None,
+    names_by_version_ids=None,
+    archived=False,
+    standard=True,
+    fields=None
+):
+    if context_filters is not None:
+        # TODO should we add the support?
+        # - there was ability to filter using regex
+        raise ValueError("OP v4 can't filter by representation context.")
+
+    if not archived and not standard:
+        return
+
+    if archived and not standard:
+        active = False
+    elif not archived and standard:
+        active = True
+    else:
+        active = None
+
+    con = get_server_api_connection()
+    fields = representation_fields_v3_to_v4(fields, con)
+    if fields and active is not None:
+        fields.add("active")
+
+    representations = con.get_representations(
+        project_name,
+        representation_ids,
+        representation_names,
+        version_ids,
+        names_by_version_ids,
+        active,
+        fields=fields
+    )
+    for representation in representations:
+        yield convert_v4_representation_to_v3(representation)
+
+
+def get_representation_parents(project_name, representation):
+    if not representation:
+        return None
+
+    repre_id = representation["_id"]
+    parents_by_repre_id = get_representations_parents(
+        project_name, [representation]
+    )
+    return parents_by_repre_id[repre_id]
+
+
+def get_representations_parents(project_name, representations):
+    repre_ids = {
+        repre["_id"]
+        for repre in representations
+    }
+    con = get_server_api_connection()
+    parents_by_repre_id = con.get_representations_parents(
+        project_name, repre_ids
+    )
+    folder_ids = set()
+    for parents in parents_by_repre_id.values():
+        folder_ids.add(parents[2]["id"])
+
+    tasks_by_folder_id = {}
+
+    new_parents = {}
+    for repre_id, parents in parents_by_repre_id.items():
+        version, subset, folder, project = parents
+        folder_tasks = tasks_by_folder_id.get(folder["id"]) or {}
+        folder["tasks"] = folder_tasks
+        new_parents[repre_id] = (
+            convert_v4_version_to_v3(version),
+            convert_v4_subset_to_v3(subset),
+            convert_v4_folder_to_v3(folder, project_name),
+            project
+        )
+    return new_parents
+
+
+def get_archived_representations(
+    project_name,
+    representation_ids=None,
+    representation_names=None,
+    version_ids=None,
+    context_filters=None,
+    names_by_version_ids=None,
+    fields=None
+):
+    return get_representations(
+        project_name,
+        representation_ids=representation_ids,
+        representation_names=representation_names,
+        version_ids=version_ids,
+        context_filters=context_filters,
+        names_by_version_ids=names_by_version_ids,
+        archived=True,
+        standard=False,
+        fields=fields
+    )
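+
+
+# Illustrative usage sketch (hypothetical project and version id): iterate
+# representations of a version and resolve their parent documents.
+#
+#   for repre in get_representations("my_project", version_ids=[version_id]):
+#       version, subset, asset, project = get_representation_parents(
+#           "my_project", repre
+#       )
+
+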
+def get_thumbnail(
+    project_name, thumbnail_id, entity_type, entity_id, fields=None
+):
+    """Receive thumbnail entity data.
+
+    Args:
+        project_name (str): Name of project where to look for queried
+            entities.
+        thumbnail_id (Union[str, ObjectId]): Id of thumbnail entity.
+        entity_type (str): Type of entity for which the thumbnail should be
+            received.
+        entity_id (str): Id of entity for which the thumbnail should be
+            received.
+        fields (Iterable[str]): Fields that should be returned. All fields
+            are returned if 'None' is passed.
+
+    Returns:
+        None: If thumbnail with specified id was not found.
+        Dict: Thumbnail entity data which can be reduced to specified
+            'fields'.
+    """
+
+    if not thumbnail_id or not entity_type or not entity_id:
+        return None
+
+    if entity_type == "asset":
+        entity_type = "folder"
+
+    elif entity_type == "hero_version":
+        entity_type = "version"
+
+    return {
+        "_id": thumbnail_id,
+        "type": "thumbnail",
+        "schema": CURRENT_THUMBNAIL_SCHEMA,
+        "data": {
+            "entity_type": entity_type,
+            "entity_id": entity_id
+        }
+    }
+
+
+def get_thumbnails(project_name, thumbnail_contexts, fields=None):
+    # Thumbnail items are dictionaries, which are not hashable, so they
+    #   are collected into a list ('set.add' would raise 'TypeError')
+    thumbnail_items = []
+    for thumbnail_context in thumbnail_contexts:
+        thumbnail_id, entity_type, entity_id = thumbnail_context
+        thumbnail_item = get_thumbnail(
+            project_name, thumbnail_id, entity_type, entity_id
+        )
+        if thumbnail_item:
+            thumbnail_items.append(thumbnail_item)
+    return thumbnail_items
+
+
+def get_thumbnail_id_from_source(project_name, src_type, src_id):
+    """Receive thumbnail id from source entity.
+
+    Args:
+        project_name (str): Name of project where to look for queried
+            entities.
+        src_type (str): Type of source entity ('asset', 'version').
+        src_id (Union[str, ObjectId]): Id of source entity.
+
+    Returns:
+        ObjectId: Thumbnail id assigned to entity.
+        None: If source entity does not have any thumbnail id assigned.
+    """
+
+    if not src_type or not src_id:
+        return None
+
+    if src_type == "version":
+        version = get_version_by_id(
+            project_name, src_id, fields=["data.thumbnail_id"]
+        ) or {}
+        return version.get("data", {}).get("thumbnail_id")
+
+    if src_type == "asset":
+        asset = get_asset_by_id(
+            project_name, src_id, fields=["data.thumbnail_id"]
+        ) or {}
+        return asset.get("data", {}).get("thumbnail_id")
+
+    return None
+
+
+def get_workfile_info(
+    project_name, asset_id, task_name, filename, fields=None
+):
+    if not asset_id or not task_name or not filename:
+        return None
+
+    con = get_server_api_connection()
+    task = con.get_task_by_name(
+        project_name, asset_id, task_name, fields=["id", "name", "folderId"]
+    )
+    if not task:
+        return None
+
+    fields = workfile_info_fields_v3_to_v4(fields)
+
+    for workfile_info in con.get_workfiles_info(
+        project_name, task_ids=[task["id"]], fields=fields
+    ):
+        if workfile_info["name"] == filename:
+            return convert_v4_workfile_info_to_v3(workfile_info, task)
+    return None
diff --git a/openpype/client/server/entity_links.py b/openpype/client/server/entity_links.py
new file mode 100644
index 0000000000..f61b461f38
--- /dev/null
+++ b/openpype/client/server/entity_links.py
@@ -0,0 +1,65 @@
+def get_linked_asset_ids(project_name, asset_doc=None, asset_id=None):
+    """Extract linked asset ids from asset document.
+
+    One of asset document or asset id must be passed.
+
+    Note:
+        Asset links currently work only from asset to assets.
+
+    Args:
+        project_name (str): Project where to look for asset.
+        asset_doc (dict): Asset document from DB.
+        asset_id (str): Asset id to find its document.
+
+    Returns:
+        List[Union[ObjectId, str]]: Asset ids of input links.
+    """
+
+    return []
+
+
+def get_linked_assets(
+    project_name, asset_doc=None, asset_id=None, fields=None
+):
+    """Return linked assets based on passed asset document.
+
+    One of asset document or asset id must be passed.
+
+    Args:
+        project_name (str): Name of project where to look for queried
+            entities.
+        asset_doc (Dict[str, Any]): Asset document from database.
+        asset_id (Union[ObjectId, str]): Asset id. Can be used instead of
+            asset document.
+        fields (Iterable[str]): Fields that should be returned. All fields
+            are returned if 'None' is passed.
+
+    Returns:
+        List[Dict[str, Any]]: Asset documents of input links for passed
+            asset doc.
+    """
+
+    return []
+
+
+def get_linked_representation_id(
+    project_name, repre_doc=None, repre_id=None, link_type=None, max_depth=None
+):
+    """Returns list of linked ids of particular type (if provided).
+
+    One of representation document or representation id must be passed.
+
+    Note:
+        Representation links currently work only from representation
+        through version back to representations.
+
+    Args:
+        project_name (str): Name of project where to look for links.
+        repre_doc (Dict[str, Any]): Representation document.
+        repre_id (Union[ObjectId, str]): Representation id.
+        link_type (str): Type of link (e.g. 'reference', ...).
+        max_depth (int): Limit recursion level. Default: 0
+
+    Returns:
+        List[ObjectId]: Linked representation ids.
+    """
+
+    return []
diff --git a/openpype/client/server/openpype_comp.py b/openpype/client/server/openpype_comp.py
new file mode 100644
index 0000000000..00ee0aae92
--- /dev/null
+++ b/openpype/client/server/openpype_comp.py
@@ -0,0 +1,156 @@
+import collections
+from ayon_api.graphql import GraphQlQuery, FIELD_VALUE, fields_to_dict
+
+from .constants import DEFAULT_FOLDER_FIELDS
+
+
+def folders_tasks_graphql_query(fields):
+    query = GraphQlQuery("FoldersQuery")
+    project_name_var = query.add_variable("projectName", "String!")
+    folder_ids_var = query.add_variable("folderIds", "[String!]")
+    parent_folder_ids_var = query.add_variable("parentFolderIds", "[String!]")
+    folder_paths_var = query.add_variable("folderPaths", "[String!]")
+    folder_names_var = query.add_variable("folderNames", "[String!]")
+    has_subsets_var = query.add_variable("folderHasSubsets", "Boolean!")
+
+    project_field = query.add_field("project")
+    project_field.set_filter("name", project_name_var)
+
+    folders_field = project_field.add_field("folders", has_edges=True)
+    folders_field.set_filter("ids", folder_ids_var)
+    folders_field.set_filter("parentIds", parent_folder_ids_var)
+    folders_field.set_filter("names", folder_names_var)
+    folders_field.set_filter("paths", folder_paths_var)
+    folders_field.set_filter("hasSubsets", has_subsets_var)
+
+    fields = set(fields)
+    fields.discard("tasks")
+    tasks_field = folders_field.add_field("tasks", has_edges=True)
+    tasks_field.add_field("name")
+    tasks_field.add_field("taskType")
+
+    nested_fields = fields_to_dict(fields)
+
+    query_queue = collections.deque()
+    for key, value in nested_fields.items():
+        query_queue.append((key, value, folders_field))
+
+    while query_queue:
+        item = query_queue.popleft()
+        key, value, parent = item
+        field = parent.add_field(key)
+        if value is FIELD_VALUE:
+            continue
+
+        for k, v in value.items():
+            query_queue.append((k, v, field))
+    return query
+
+
+def get_folders_with_tasks(
+    con,
+    project_name,
+    folder_ids=None,
+    folder_paths=None,
+    folder_names=None,
+    parent_ids=None,
+    active=True,
+    fields=None
+):
+    """Query folders with tasks from server.
+
+    This is for v3 compatibility, where tasks were stored on assets. It is
+    an inefficient way of querying folders and tasks, so it was added only
+    as a compatibility function.
+
+    Todos:
+        Folder name won't be a unique identifier, so we should add folder
+        path filtering.
+
+    Notes:
+        Filter 'active' doesn't have a direct filter in GraphQl.
+
+    Args:
+        con (ServerAPI): Connection to server.
+        project_name (str): Name of project where folders are.
+ folder_ids (Iterable[str]): Folder ids to filter. + folder_paths (Iterable[str]): Folder paths used for filtering. + folder_names (Iterable[str]): Folder names used for filtering. + parent_ids (Iterable[str]): Ids of folder parents. Use 'None' + if folder is direct child of project. + active (Union[bool, None]): Filter active/inactive folders. Both + are returned if is set to None. + fields (Union[Iterable(str), None]): Fields to be queried + for folder. All possible folder fields are returned if 'None' + is passed. + + Returns: + List[Dict[str, Any]]: Queried folder entities. + """ + + if not project_name: + return [] + + filters = { + "projectName": project_name + } + if folder_ids is not None: + folder_ids = set(folder_ids) + if not folder_ids: + return [] + filters["folderIds"] = list(folder_ids) + + if folder_paths is not None: + folder_paths = set(folder_paths) + if not folder_paths: + return [] + filters["folderPaths"] = list(folder_paths) + + if folder_names is not None: + folder_names = set(folder_names) + if not folder_names: + return [] + filters["folderNames"] = list(folder_names) + + if parent_ids is not None: + parent_ids = set(parent_ids) + if not parent_ids: + return [] + if None in parent_ids: + # Replace 'None' with '"root"' which is used during GraphQl + # query for parent ids filter for folders without folder + # parent + parent_ids.remove(None) + parent_ids.add("root") + + if project_name in parent_ids: + # Replace project name with '"root"' which is used during + # GraphQl query for parent ids filter for folders without + # folder parent + parent_ids.remove(project_name) + parent_ids.add("root") + + filters["parentFolderIds"] = list(parent_ids) + + if fields: + fields = set(fields) + else: + fields = con.get_default_fields_for_type("folder") + fields |= DEFAULT_FOLDER_FIELDS + + if active is not None: + fields.add("active") + + query = folders_tasks_graphql_query(fields) + for attr, filter_value in filters.items(): + query.set_variable_value(attr, filter_value) + + parsed_data = query.query(con) + folders = parsed_data["project"]["folders"] + if active is None: + return folders + return [ + folder + for folder in folders + if folder["active"] is active + ] diff --git a/openpype/client/server/operations.py b/openpype/client/server/operations.py new file mode 100644 index 0000000000..6148f6a098 --- /dev/null +++ b/openpype/client/server/operations.py @@ -0,0 +1,863 @@ +import copy +import json +import collections +import uuid +import datetime + +from bson.objectid import ObjectId +from ayon_api import get_server_api_connection + +from openpype.client.operations_base import ( + REMOVED_VALUE, + CreateOperation, + UpdateOperation, + DeleteOperation, + BaseOperationsSession +) + +from openpype.client.mongo.operations import ( + CURRENT_THUMBNAIL_SCHEMA, + CURRENT_REPRESENTATION_SCHEMA, + CURRENT_HERO_VERSION_SCHEMA, + CURRENT_VERSION_SCHEMA, + CURRENT_SUBSET_SCHEMA, + CURRENT_ASSET_DOC_SCHEMA, + CURRENT_PROJECT_SCHEMA, +) + +from .conversion_utils import ( + convert_create_asset_to_v4, + convert_create_task_to_v4, + convert_create_subset_to_v4, + convert_create_version_to_v4, + convert_create_hero_version_to_v4, + convert_create_representation_to_v4, + convert_create_workfile_info_to_v4, + + convert_update_folder_to_v4, + convert_update_subset_to_v4, + convert_update_version_to_v4, + convert_update_hero_version_to_v4, + convert_update_representation_to_v4, + convert_update_workfile_info_to_v4, +) +from .utils import create_entity_id + + +def 
_create_or_convert_to_id(entity_id=None):
+    if entity_id is None:
+        return create_entity_id()
+
+    if isinstance(entity_id, ObjectId):
+        raise TypeError("Type of 'ObjectId' is not supported anymore.")
+
+    # Validate if the value can be converted to uuid
+    uuid.UUID(entity_id)
+    return entity_id
+
+
+def new_project_document(
+    project_name, project_code, config, data=None, entity_id=None
+):
+    """Create skeleton data of project document.
+
+    Args:
+        project_name (str): Name of project. Used as identifier of a project.
+        project_code (str): Shorter version of project name without spaces
+            and special characters (in most cases). Should also be
+            considered a unique name across projects.
+        config (Dict[str, Any]): Project config consists of roots, templates,
+            applications and other project Anatomy related data.
+        data (Dict[str, Any]): Project data with information about its
+            attributes (e.g. 'fps' etc.) or integration specific keys.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id
+            is created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of project document.
+    """
+
+    if data is None:
+        data = {}
+
+    data["code"] = project_code
+
+    return {
+        "_id": _create_or_convert_to_id(entity_id),
+        "name": project_name,
+        "type": CURRENT_PROJECT_SCHEMA,
+        "entity_data": data,
+        "config": config
+    }
+
+
+def new_asset_document(
+    name, project_id, parent_id, parents, data=None, entity_id=None
+):
+    """Create skeleton data of asset document.
+
+    Args:
+        name (str): Is considered a unique identifier of asset in project.
+        project_id (Union[str, ObjectId]): Id of project document.
+        parent_id (Union[str, ObjectId]): Id of parent asset.
+        parents (List[str]): List of parent assets names.
+        data (Dict[str, Any]): Asset document data. Empty dictionary is used
+            if not passed. Value of 'parent_id' is used to fill
+            'visualParent'.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id
+            is created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of asset document.
+    """
+
+    if data is None:
+        data = {}
+    if parent_id is not None:
+        parent_id = _create_or_convert_to_id(parent_id)
+    data["visualParent"] = parent_id
+    data["parents"] = parents
+
+    return {
+        "_id": _create_or_convert_to_id(entity_id),
+        "type": "asset",
+        "name": name,
+        # This will be ignored
+        "parent": project_id,
+        "data": data,
+        "schema": CURRENT_ASSET_DOC_SCHEMA
+    }
+
+
+def new_subset_document(name, family, asset_id, data=None, entity_id=None):
+    """Create skeleton data of subset document.
+
+    Args:
+        name (str): Is considered a unique identifier of subset under asset.
+        family (str): Subset's family.
+        asset_id (Union[str, ObjectId]): Id of parent asset.
+        data (Dict[str, Any]): Subset document data. Empty dictionary is used
+            if not passed. Value of 'family' is used to fill 'family'.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id
+            is created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of subset document.
+    """
+
+    if data is None:
+        data = {}
+    data["family"] = family
+    return {
+        "_id": _create_or_convert_to_id(entity_id),
+        "schema": CURRENT_SUBSET_SCHEMA,
+        "type": "subset",
+        "name": name,
+        "data": data,
+        "parent": _create_or_convert_to_id(asset_id)
+    }
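+
+
+# Illustrative usage sketch (hypothetical ids): skeleton documents are plain
+# dictionaries that can be passed to create operations as they are.
+#
+#   subset_doc = new_subset_document("modelMain", "model", asset_id)
+#   version_doc = new_version_doc(1, subset_doc["_id"])
+
+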
+def new_version_doc(version, subset_id, data=None, entity_id=None):
+    """Create skeleton data of version document.
+
+    Args:
+        version (int): Is considered a unique identifier of version
+            under subset.
+        subset_id (Union[str, ObjectId]): Id of parent subset.
+        data (Dict[str, Any]): Version document data.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id
+            is created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of version document.
+    """
+
+    if data is None:
+        data = {}
+
+    return {
+        "_id": _create_or_convert_to_id(entity_id),
+        "schema": CURRENT_VERSION_SCHEMA,
+        "type": "version",
+        "name": int(version),
+        "parent": _create_or_convert_to_id(subset_id),
+        "data": data
+    }
+
+
+def new_hero_version_doc(subset_id, data, version=None, entity_id=None):
+    """Create skeleton data of hero version document.
+
+    Args:
+        subset_id (Union[str, ObjectId]): Id of parent subset.
+        data (Dict[str, Any]): Version document data.
+        version (int): Version number of the source version.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id
+            is created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of hero version document.
+    """
+
+    if version is None:
+        version = -1
+    elif version > 0:
+        version = -version
+
+    return {
+        "_id": _create_or_convert_to_id(entity_id),
+        "schema": CURRENT_HERO_VERSION_SCHEMA,
+        "type": "hero_version",
+        "version": version,
+        "parent": _create_or_convert_to_id(subset_id),
+        "data": data
+    }
+
+
+def new_representation_doc(
+    name, version_id, context, data=None, entity_id=None
+):
+    """Create skeleton data of representation document.
+
+    Args:
+        name (str): Representation name considered as unique identifier
+            of representation under version.
+        version_id (Union[str, ObjectId]): Id of parent version.
+        context (Dict[str, Any]): Representation context used to fill
+            templates or to query the representation.
+        data (Dict[str, Any]): Representation document data.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id
+            is created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of representation document.
+    """
+
+    if data is None:
+        data = {}
+
+    return {
+        "_id": _create_or_convert_to_id(entity_id),
+        "schema": CURRENT_REPRESENTATION_SCHEMA,
+        "type": "representation",
+        "parent": _create_or_convert_to_id(version_id),
+        "name": name,
+        "data": data,
+
+        # Imprint shortcut to context for performance reasons.
+        "context": context
+    }
+
+
+def new_thumbnail_doc(data=None, entity_id=None):
+    """Create skeleton data of thumbnail document.
+
+    Args:
+        data (Dict[str, Any]): Thumbnail document data.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id
+            is created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of thumbnail document.
+    """
+
+    if data is None:
+        data = {}
+
+    return {
+        "_id": _create_or_convert_to_id(entity_id),
+        "type": "thumbnail",
+        "schema": CURRENT_THUMBNAIL_SCHEMA,
+        "data": data
+    }
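+
+
+# Illustrative usage sketch (hypothetical values): representation skeleton
+# bound to a version; 'context' mirrors the publish template data.
+#
+#   repre_doc = new_representation_doc(
+#       "exr", version_doc["_id"], {"ext": "exr", "subset": "renderMain"}
+#   )
+
+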
+ """ + + if not data: + data = {} + + return { + "_id": _create_or_convert_to_id(entity_id), + "type": "workfile", + "parent": _create_or_convert_to_id(asset_id), + "task_name": task_name, + "filename": filename, + "data": data, + "files": files + } + + +def _prepare_update_data(old_doc, new_doc, replace): + changes = {} + for key, value in new_doc.items(): + if key not in old_doc or value != old_doc[key]: + changes[key] = value + + if replace: + for key in old_doc.keys(): + if key not in new_doc: + changes[key] = REMOVED_VALUE + return changes + + +def prepare_subset_update_data(old_doc, new_doc, replace=True): + """Compare two subset documents and prepare update data. + + Based on compared values will create update data for + 'MongoUpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_version_update_data(old_doc, new_doc, replace=True): + """Compare two version documents and prepare update data. + + Based on compared values will create update data for + 'MongoUpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_hero_version_update_data(old_doc, new_doc, replace=True): + """Compare two hero version documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_representation_update_data(old_doc, new_doc, replace=True): + """Compare two representation documents and prepare update data. + + Based on compared values will create update data for + 'MongoUpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_workfile_info_update_data(old_doc, new_doc, replace=True): + """Compare two workfile info documents and prepare update data. + + Based on compared values will create update data for + 'MongoUpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +class FailedOperations(Exception): + pass + + +def entity_data_json_default(value): + if isinstance(value, datetime.datetime): + return int(value.timestamp()) + + raise TypeError( + "Object of type {} is not JSON serializable".format(str(type(value))) + ) + + +def failed_json_default(value): + return "< Failed value {} > {}".format(type(value), str(value)) + + +class ServerCreateOperation(CreateOperation): + """Opeartion to create an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + data (Dict[str, Any]): Data of entity that will be created. 
+ """ + + def __init__(self, project_name, entity_type, data, session): + self._session = session + + if not data: + data = {} + data = copy.deepcopy(data) + if entity_type == "project": + raise ValueError("Project cannot be created using operations") + + tasks = None + if entity_type in "asset": + # TODO handle tasks + entity_type = "folder" + if "data" in data: + tasks = data["data"].get("tasks") + + project = self._session.get_project(project_name) + new_data = convert_create_asset_to_v4(data, project, self.con) + + elif entity_type == "task": + project = self._session.get_project(project_name) + new_data = convert_create_task_to_v4(data, project, self.con) + + elif entity_type == "subset": + new_data = convert_create_subset_to_v4(data, self.con) + + elif entity_type == "version": + new_data = convert_create_version_to_v4(data, self.con) + + elif entity_type == "hero_version": + new_data = convert_create_hero_version_to_v4( + data, project_name, self.con + ) + entity_type = "version" + + elif entity_type in ("representation", "archived_representation"): + new_data = convert_create_representation_to_v4(data, self.con) + entity_type = "representation" + + elif entity_type == "workfile": + new_data = convert_create_workfile_info_to_v4( + data, project_name, self.con + ) + + else: + raise ValueError( + "Unhandled entity type \"{}\"".format(entity_type) + ) + + # Simple check if data can be dumped into json + # - should raise error on 'ObjectId' object + try: + new_data = json.loads( + json.dumps(new_data, default=entity_data_json_default) + ) + + except: + raise ValueError("Couldn't json parse body: {}".format( + json.dumps(new_data, default=failed_json_default) + )) + + super(ServerCreateOperation, self).__init__( + project_name, entity_type, new_data + ) + + if "id" not in self._data: + self._data["id"] = create_entity_id() + + if tasks: + copied_tasks = copy.deepcopy(tasks) + for task_name, task in copied_tasks.items(): + task["name"] = task_name + task["folderId"] = self._data["id"] + self.session.create_entity( + project_name, "task", task, nested_id=self.id + ) + + @property + def con(self): + return self.session.con + + @property + def session(self): + return self._session + + @property + def entity_id(self): + return self._data["id"] + + def to_server_operation(self): + return { + "id": self.id, + "type": "create", + "entityType": self.entity_type, + "entityId": self.entity_id, + "data": self._data + } + + +class ServerUpdateOperation(UpdateOperation): + """Operation to update an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + entity_id (Union[str, ObjectId]): Identifier of an entity. + update_data (Dict[str, Any]): Key -> value changes that will be set in + database. If value is set to 'REMOVED_VALUE' the key will be + removed. Only first level of dictionary is checked (on purpose). 
+ """ + + def __init__( + self, project_name, entity_type, entity_id, update_data, session + ): + self._session = session + + update_data = copy.deepcopy(update_data) + if entity_type == "project": + raise ValueError("Project cannot be created using operations") + + if entity_type in ("asset", "archived_asset"): + new_update_data = convert_update_folder_to_v4( + project_name, entity_id, update_data, self.con + ) + entity_type = "folder" + + elif entity_type == "subset": + new_update_data = convert_update_subset_to_v4( + project_name, entity_id, update_data, self.con + ) + + elif entity_type == "version": + new_update_data = convert_update_version_to_v4( + project_name, entity_id, update_data, self.con + ) + + elif entity_type == "hero_version": + new_update_data = convert_update_hero_version_to_v4( + project_name, entity_id, update_data, self.con + ) + entity_type = "version" + + elif entity_type in ("representation", "archived_representation"): + new_update_data = convert_update_representation_to_v4( + project_name, entity_id, update_data, self.con + ) + entity_type = "representation" + + elif entity_type == "workfile": + new_update_data = convert_update_workfile_info_to_v4( + project_name, entity_id, update_data, self.con + ) + + else: + raise ValueError( + "Unhandled entity type \"{}\"".format(entity_type) + ) + + try: + new_update_data = json.loads( + json.dumps(new_update_data, default=entity_data_json_default) + ) + + except: + raise ValueError("Couldn't json parse body: {}".format( + json.dumps(new_update_data, default=failed_json_default) + )) + + super(ServerUpdateOperation, self).__init__( + project_name, entity_type, entity_id, new_update_data + ) + + @property + def con(self): + return self.session.con + + @property + def session(self): + return self._session + + def to_server_operation(self): + if not self._update_data: + return None + + update_data = {} + for key, value in self._update_data.items(): + if value is REMOVED_VALUE: + value = None + update_data[key] = value + + return { + "id": self.id, + "type": "update", + "entityType": self.entity_type, + "entityId": self.entity_id, + "data": update_data + } + + +class ServerDeleteOperation(DeleteOperation): + """Opeartion to delete an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + entity_id (Union[str, ObjectId]): Entity id that will be removed. 
+ """ + + def __init__(self, project_name, entity_type, entity_id, session): + self._session = session + + if entity_type == "asset": + entity_type == "folder" + + if entity_type == "hero_version": + entity_type = "version" + + super(ServerDeleteOperation, self).__init__( + project_name, entity_type, entity_id + ) + + @property + def con(self): + return self.session.con + + @property + def session(self): + return self._session + + def to_server_operation(self): + return { + "id": self.id, + "type": self.operation_name, + "entityId": self.entity_id, + "entityType": self.entity_type, + } + + +class OperationsSession(BaseOperationsSession): + def __init__(self, con=None, *args, **kwargs): + super(OperationsSession, self).__init__(*args, **kwargs) + if con is None: + con = get_server_api_connection() + self._con = con + self._project_cache = {} + self._nested_operations = collections.defaultdict(list) + + @property + def con(self): + return self._con + + def get_project(self, project_name): + if project_name not in self._project_cache: + self._project_cache[project_name] = self.con.get_project( + project_name) + return copy.deepcopy(self._project_cache[project_name]) + + def commit(self): + """Commit session operations.""" + + operations, self._operations = self._operations, [] + if not operations: + return + + operations_by_project = collections.defaultdict(list) + for operation in operations: + operations_by_project[operation.project_name].append(operation) + + body_by_id = {} + results = [] + for project_name, operations in operations_by_project.items(): + operations_body = [] + for operation in operations: + body = operation.to_server_operation() + if body is not None: + try: + json.dumps(body) + except: + raise ValueError("Couldn't json parse body: {}".format( + json.dumps( + body, indent=4, default=failed_json_default + ) + )) + + body_by_id[operation.id] = body + operations_body.append(body) + + if operations_body: + result = self._con.post( + "projects/{}/operations".format(project_name), + operations=operations_body, + canFail=False + ) + results.append(result.data) + + for result in results: + if result.get("success"): + continue + + if "operations" not in result: + raise FailedOperations( + "Operation failed. Content: {}".format(str(result)) + ) + + for op_result in result["operations"]: + if not op_result["success"]: + operation_id = op_result["id"] + raise FailedOperations(( + "Operation \"{}\" failed with data:\n{}\nError: {}." + ).format( + operation_id, + json.dumps(body_by_id[operation_id], indent=4), + op_result.get("error", "unknown"), + )) + + def create_entity(self, project_name, entity_type, data, nested_id=None): + """Fast access to 'ServerCreateOperation'. + + Args: + project_name (str): On which project the creation happens. + entity_type (str): Which entity type will be created. + data (Dicst[str, Any]): Entity data. + nested_id (str): Id of other operation from which is triggered + operation -> Operations can trigger suboperations but they + must be added to operations list after it's parent is added. + + Returns: + ServerCreateOperation: Object of update operation. 
+ """ + + operation = ServerCreateOperation( + project_name, entity_type, data, self + ) + + if nested_id: + self._nested_operations[nested_id].append(operation) + else: + self.add(operation) + if operation.id in self._nested_operations: + self.extend(self._nested_operations.pop(operation.id)) + + return operation + + def update_entity( + self, project_name, entity_type, entity_id, update_data, nested_id=None + ): + """Fast access to 'ServerUpdateOperation'. + + Returns: + ServerUpdateOperation: Object of update operation. + """ + + operation = ServerUpdateOperation( + project_name, entity_type, entity_id, update_data, self + ) + if nested_id: + self._nested_operations[nested_id].append(operation) + else: + self.add(operation) + if operation.id in self._nested_operations: + self.extend(self._nested_operations.pop(operation.id)) + return operation + + def delete_entity( + self, project_name, entity_type, entity_id, nested_id=None + ): + """Fast access to 'ServerDeleteOperation'. + + Returns: + ServerDeleteOperation: Object of delete operation. + """ + + operation = ServerDeleteOperation( + project_name, entity_type, entity_id, self + ) + if nested_id: + self._nested_operations[nested_id].append(operation) + else: + self.add(operation) + if operation.id in self._nested_operations: + self.extend(self._nested_operations.pop(operation.id)) + return operation + + +def create_project( + project_name, + project_code, + library_project=False, + preset_name=None, + con=None +): + """Create project using OpenPype settings. + + This project creation function is not validating project document on + creation. It is because project document is created blindly with only + minimum required information about project which is it's name, code, type + and schema. + + Entered project name must be unique and project must not exist yet. + + Note: + This function is here to be OP v4 ready but in v3 has more logic + to do. That's why inner imports are in the body. + + Args: + project_name (str): New project name. Should be unique. + project_code (str): Project's code should be unique too. + library_project (bool): Project is library project. + preset_name (str): Name of anatomy preset. Default is used if not + passed. + con (ServerAPI): Connection to server with logged user. + + Raises: + ValueError: When project name already exists in MongoDB. + + Returns: + dict: Created project document. + """ + + if con is None: + con = get_server_api_connection() + + return con.create_project( + project_name, + project_code, + library_project, + preset_name + ) + + +def delete_project(project_name, con=None): + if con is None: + con = get_server_api_connection() + + return con.delete_project(project_name) + + +def create_thumbnail(project_name, src_filepath, con=None): + if con is None: + con = get_server_api_connection() + return con.create_thumbnail(project_name, src_filepath) diff --git a/openpype/client/server/utils.py b/openpype/client/server/utils.py new file mode 100644 index 0000000000..ed128cfad9 --- /dev/null +++ b/openpype/client/server/utils.py @@ -0,0 +1,109 @@ +import uuid + +from openpype.client.operations_base import REMOVED_VALUE + + +def create_entity_id(): + return uuid.uuid1().hex + + +def prepare_attribute_changes(old_entity, new_entity, replace=False): + """Prepare changes of attributes on entities. + + Compare 'attrib' of old and new entity data to prepare only changed + values that should be sent to server for update. + + Example: + >>> # Limited entity data to 'attrib' + >>> old_entity = { + ... 
"attrib": {"attr_1": 1, "attr_2": "MyString", "attr_3": True} + ... } + >>> new_entity = { + ... "attrib": {"attr_1": 2, "attr_3": True, "attr_4": 3} + ... } + >>> # Changes if replacement should not happen + >>> expected_changes = { + ... "attr_1": 2, + ... "attr_4": 3 + ... } + >>> changes = prepare_attribute_changes(old_entity, new_entity) + >>> changes == expected_changes + True + + >>> # Changes if replacement should happen + >>> expected_changes_replace = { + ... "attr_1": 2, + ... "attr_2": REMOVED_VALUE, + ... "attr_4": 3 + ... } + >>> changes_replace = prepare_attribute_changes( + ... old_entity, new_entity, True) + >>> changes_replace == expected_changes_replace + True + + Args: + old_entity (dict[str, Any]): Data of entity queried from server. + new_entity (dict[str, Any]): Entity data with applied changes. + replace (bool): New entity should fully replace all old entity values. + + Returns: + Dict[str, Any]: Values from new entity only if value has changed. + """ + + attrib_changes = {} + new_attrib = new_entity.get("attrib") + old_attrib = old_entity.get("attrib") + if new_attrib is None: + if not replace: + return attrib_changes + new_attrib = {} + + if old_attrib is None: + return new_attrib + + for attr, new_attr_value in new_attrib.items(): + old_attr_value = old_attrib.get(attr) + if old_attr_value != new_attr_value: + attrib_changes[attr] = new_attr_value + + if replace: + for attr in old_attrib: + if attr not in new_attrib: + attrib_changes[attr] = REMOVED_VALUE + + return attrib_changes + + +def prepare_entity_changes(old_entity, new_entity, replace=False): + """Prepare changes of AYON entities. + + Compare old and new entity to filter values from new data that changed. + + Args: + old_entity (dict[str, Any]): Data of entity queried from server. + new_entity (dict[str, Any]): Entity data with applied changes. + replace (bool): All attributes should be replaced by new values. So + all attribute values that are not on new entity will be removed. + + Returns: + Dict[str, Any]: Only values from new entity that changed. 
+ """ + + changes = {} + for key, new_value in new_entity.items(): + if key == "attrib": + continue + + old_value = old_entity.get(key) + if old_value != new_value: + changes[key] = new_value + + if replace: + for key in old_entity: + if key not in new_entity: + changes[key] = REMOVED_VALUE + + attr_changes = prepare_attribute_changes(old_entity, new_entity, replace) + if attr_changes: + changes["attrib"] = attr_changes + return changes diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 0efc46edaf..f930dec720 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -1128,11 +1128,15 @@ def format_anatomy(data): anatomy = Anatomy() log.debug("__ anatomy.templates: {}".format(anatomy.templates)) - padding = int( - anatomy.templates["render"].get( - "frame_padding" + padding = None + if "frame_padding" in anatomy.templates.keys(): + padding = int(anatomy.templates["frame_padding"]) + elif "render" in anatomy.templates.keys(): + padding = int( + anatomy.templates["render"].get( + "frame_padding" + ) ) - ) version = data.get("version", None) if not version: diff --git a/openpype/lib/execute.py b/openpype/lib/execute.py index 6f52efdfcc..6c1425fc63 100644 --- a/openpype/lib/execute.py +++ b/openpype/lib/execute.py @@ -5,6 +5,8 @@ import platform import json import tempfile +from openpype import AYON_SERVER_ENABLED + from .log import Logger from .vendor_bin_utils import find_executable @@ -321,19 +323,22 @@ def get_openpype_execute_args(*args): It is possible to pass any arguments that will be added after pype executables. """ - pype_executable = os.environ["OPENPYPE_EXECUTABLE"] - pype_args = [pype_executable] + executable = os.environ["OPENPYPE_EXECUTABLE"] + launch_args = [executable] - executable_filename = os.path.basename(pype_executable) + executable_filename = os.path.basename(executable) if "python" in executable_filename.lower(): - pype_args.append( - os.path.join(os.environ["OPENPYPE_ROOT"], "start.py") + filename = "start.py" + if AYON_SERVER_ENABLED: + filename = "ayon_start.py" + launch_args.append( + os.path.join(os.environ["OPENPYPE_ROOT"], filename) ) if args: - pype_args.extend(args) + launch_args.extend(args) - return pype_args + return launch_args def get_linux_launcher_args(*args): diff --git a/openpype/lib/local_settings.py b/openpype/lib/local_settings.py index c6c9699240..8f09c6be63 100644 --- a/openpype/lib/local_settings.py +++ b/openpype/lib/local_settings.py @@ -29,6 +29,7 @@ except ImportError: import six import appdirs +from openpype import AYON_SERVER_ENABLED from openpype.settings import ( get_local_settings, get_system_settings @@ -517,11 +518,54 @@ def _create_local_site_id(registry=None): return new_id +def get_ayon_appdirs(*args): + """Local app data directory of AYON client. + + Args: + *args (Iterable[str]): Subdirectories/files in local app data dir. + + Returns: + str: Path to directory/file in local app data dir. 
+ """ + + return os.path.join( + appdirs.user_data_dir("ayon", "ynput"), + *args + ) + + +def _get_ayon_local_site_id(): + # used for background syncing + site_id = os.environ.get("AYON_SITE_ID") + if site_id: + return site_id + + site_id_path = get_ayon_appdirs("site_id") + if os.path.exists(site_id_path): + with open(site_id_path, "r") as stream: + site_id = stream.read() + + if site_id: + return site_id + + try: + from ayon_common.utils import get_local_site_id as _get_local_site_id + site_id = _get_local_site_id() + except ImportError: + raise ValueError("Couldn't access local site id") + + return site_id + + def get_local_site_id(): """Get local site identifier. Identifier is created if does not exists yet. """ + + if AYON_SERVER_ENABLED: + return _get_ayon_local_site_id() + # override local id from environment # used for background syncing if os.environ.get("OPENPYPE_LOCAL_ID"): diff --git a/openpype/lib/log.py b/openpype/lib/log.py index 26dcd86eec..dc2e6615fe 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -24,6 +24,7 @@ import traceback import threading import copy +from openpype import AYON_SERVER_ENABLED from openpype.client.mongo import ( MongoEnvNotSet, get_default_components, @@ -212,7 +213,7 @@ class Logger: log_mongo_url_components = None # Database name in Mongo - log_database_name = os.environ["OPENPYPE_DATABASE_NAME"] + log_database_name = os.environ.get("OPENPYPE_DATABASE_NAME") # Collection name under database in Mongo log_collection_name = "logs" @@ -326,12 +327,17 @@ class Logger: # Change initialization state to prevent runtime changes # if is executed during runtime cls.initialized = False - cls.log_mongo_url_components = get_default_components() + if not AYON_SERVER_ENABLED: + cls.log_mongo_url_components = get_default_components() # Define if should logging to mongo be used - use_mongo_logging = bool(log4mongo is not None) - if use_mongo_logging: - use_mongo_logging = os.environ.get("OPENPYPE_LOG_TO_SERVER") == "1" + if AYON_SERVER_ENABLED: + use_mongo_logging = False + else: + use_mongo_logging = ( + log4mongo is not None + and os.environ.get("OPENPYPE_LOG_TO_SERVER") == "1" + ) # Set mongo id for process (ONLY ONCE) if use_mongo_logging and cls.mongo_process_id is None: @@ -453,6 +459,9 @@ class Logger: if not cls.use_mongo_logging: return + if not cls.log_database_name: + raise ValueError("Database name for logs is not set") + client = log4mongo.handlers._connection if not client: client = cls.get_log_mongo_connection() diff --git a/openpype/lib/pype_info.py b/openpype/lib/pype_info.py index 8370ecc88f..2f57d76850 100644 --- a/openpype/lib/pype_info.py +++ b/openpype/lib/pype_info.py @@ -5,6 +5,7 @@ import platform import getpass import socket +from openpype import AYON_SERVER_ENABLED from openpype.settings.lib import get_local_settings from .execute import get_openpype_execute_args from .local_settings import get_local_site_id @@ -33,6 +34,21 @@ def get_openpype_info(): } +def get_ayon_info(): + executable_args = get_openpype_execute_args() + if is_running_from_build(): + version_type = "build" + else: + version_type = "code" + return { + "build_verison": get_build_version(), + "version_type": version_type, + "executable": executable_args[-1], + "ayon_root": os.environ["AYON_ROOT"], + "server_url": os.environ["AYON_SERVER_URL"] + } + + def get_workstation_info(): """Basic information about workstation.""" host_name = socket.gethostname() @@ -52,12 +68,17 @@ def get_workstation_info(): def get_all_current_info(): """All information about current 
process in one dictionary.""" - return { - "pype": get_openpype_info(), + + output = { "workstation": get_workstation_info(), "env": os.environ.copy(), "local_settings": get_local_settings() } + if AYON_SERVER_ENABLED: + output["ayon"] = get_ayon_info() + else: + output["openpype"] = get_openpype_info() + return output def extract_pype_info_to_file(dirpath): diff --git a/openpype/modules/base.py b/openpype/modules/base.py index fb9b4e1096..c1e928ff48 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -12,8 +12,12 @@ import collections import traceback from uuid import uuid4 from abc import ABCMeta, abstractmethod -import six +import six +import appdirs +import ayon_api + +from openpype import AYON_SERVER_ENABLED from openpype.settings import ( get_system_settings, SYSTEM_SETTINGS_KEY, @@ -186,7 +190,11 @@ def get_dynamic_modules_dirs(): Returns: list: Paths loaded from studio overrides. """ + output = [] + if AYON_SERVER_ENABLED: + return output + value = get_studio_system_settings_overrides() for key in ("modules", "addon_paths", platform.system().lower()): if key not in value: @@ -299,6 +307,108 @@ def load_modules(force=False): time.sleep(0.1) +def _get_ayon_addons_information(): + """Receive information about addons to use from server. + + Todos: + Actually ask server for the information. + Allow project name as optional argument to be able to query information + about used addons for specific project. + Returns: + List[Dict[str, Any]]: List of addon information to use. + """ + + return ayon_api.get_addons_info()["addons"] + + +def _load_ayon_addons(openpype_modules, modules_key, log): + """Load AYON addons based on information from server. + + This function should not trigger downloading of any addons but only use + what is already available on the machine (at least in first stages of + development). + + Args: + openpype_modules (_ModuleClass): Module object where modules are + stored. + log (logging.Logger): Logger object. + + Returns: + List[str]: List of v3 addons to skip to load because v4 alternative is + imported. + """ + + v3_addons_to_skip = [] + + addons_info = _get_ayon_addons_information() + if not addons_info: + return v3_addons_to_skip + addons_dir = os.path.join( + appdirs.user_data_dir("ayon", "ynput"), + "addons" + ) + if not os.path.exists(addons_dir): + log.warning("Addons directory does not exists. Path \"{}\"".format( + addons_dir + )) + return v3_addons_to_skip + + for addon_info in addons_info: + addon_name = addon_info["name"] + addon_version = addon_info.get("productionVersion") + if not addon_version: + continue + + folder_name = "{}_{}".format(addon_name, addon_version) + addon_dir = os.path.join(addons_dir, folder_name) + if not os.path.exists(addon_dir): + log.warning(( + "Directory for addon {} {} does not exists. 
Path \"{}\"" + ).format(addon_name, addon_version, addon_dir)) + continue + + sys.path.insert(0, addon_dir) + imported_modules = [] + for name in os.listdir(addon_dir): + path = os.path.join(addon_dir, name) + basename, ext = os.path.splitext(name) + is_dir = os.path.isdir(path) + is_py_file = ext.lower() == ".py" + if not is_py_file and not is_dir: + continue + + try: + mod = __import__(basename, fromlist=("",)) + imported_modules.append(mod) + except BaseException: + log.warning( + "Failed to import \"{}\"".format(basename), + exc_info=True + ) + + if not imported_modules: + log.warning("Addon {} {} has no content to import".format( + addon_name, addon_version + )) + continue + + if len(imported_modules) == 1: + mod = imported_modules[0] + addon_alias = getattr(mod, "V3_ALIAS", None) + if not addon_alias: + addon_alias = addon_name + v3_addons_to_skip.append(addon_alias) + new_import_str = "{}.{}".format(modules_key, addon_alias) + + sys.modules[new_import_str] = mod + setattr(openpype_modules, addon_alias, mod) + + else: + log.info("More then one module was imported") + + return v3_addons_to_skip + + def _load_modules(): # Key under which will be modules imported in `sys.modules` modules_key = "openpype_modules" @@ -308,6 +418,12 @@ def _load_modules(): log = Logger.get_logger("ModulesLoader") + ignore_addon_names = [] + if AYON_SERVER_ENABLED: + ignore_addon_names = _load_ayon_addons( + openpype_modules, modules_key, log + ) + # Look for OpenPype modules in paths defined with `get_module_dirs` # - dynamically imported OpenPype modules and addons module_dirs = get_module_dirs() @@ -351,6 +467,9 @@ def _load_modules(): fullpath = os.path.join(dirpath, filename) basename, ext = os.path.splitext(filename) + if basename in ignore_addon_names: + continue + # Validations if os.path.isdir(fullpath): # Check existence of init file diff --git a/openpype/modules/log_viewer/log_view_module.py b/openpype/modules/log_viewer/log_view_module.py index e9dba2041c..1cafbe4fbd 100644 --- a/openpype/modules/log_viewer/log_view_module.py +++ b/openpype/modules/log_viewer/log_view_module.py @@ -1,3 +1,4 @@ +from openpype import AYON_SERVER_ENABLED from openpype.modules import OpenPypeModule, ITrayModule @@ -7,6 +8,8 @@ class LogViewModule(OpenPypeModule, ITrayModule): def initialize(self, modules_settings): logging_settings = modules_settings[self.name] self.enabled = logging_settings["enabled"] + if AYON_SERVER_ENABLED: + self.enabled = False # Tray attributes self.window = None diff --git a/openpype/modules/project_manager_action.py b/openpype/modules/project_manager_action.py index 5f74dd9ee5..bf55e1544d 100644 --- a/openpype/modules/project_manager_action.py +++ b/openpype/modules/project_manager_action.py @@ -1,3 +1,4 @@ +from openpype import AYON_SERVER_ENABLED from openpype.modules import OpenPypeModule, ITrayAction @@ -11,6 +12,9 @@ class ProjectManagerAction(OpenPypeModule, ITrayAction): module_settings = modules_settings.get(self.name) if module_settings: enabled = module_settings.get("enabled", enabled) + + if AYON_SERVER_ENABLED: + enabled = False self.enabled = enabled # Tray attributes diff --git a/openpype/modules/settings_action.py b/openpype/modules/settings_action.py index 90092a133d..5950fbd910 100644 --- a/openpype/modules/settings_action.py +++ b/openpype/modules/settings_action.py @@ -1,3 +1,4 @@ +from openpype import AYON_SERVER_ENABLED from openpype.modules import OpenPypeModule, ITrayAction @@ -10,6 +11,8 @@ class SettingsAction(OpenPypeModule, ITrayAction): def initialize(self, 
_modules_settings): # This action is always enabled self.enabled = True + if AYON_SERVER_ENABLED: + self.enabled = False # User role # TODO should be changeable @@ -80,6 +83,8 @@ class LocalSettingsAction(OpenPypeModule, ITrayAction): def initialize(self, _modules_settings): # This action is always enabled self.enabled = True + if AYON_SERVER_ENABLED: + self.enabled = False # Tray attributes self.settings_window = None diff --git a/openpype/plugins/load/add_site.py b/openpype/modules/sync_server/plugins/load/add_site.py similarity index 100% rename from openpype/plugins/load/add_site.py rename to openpype/modules/sync_server/plugins/load/add_site.py diff --git a/openpype/plugins/load/remove_site.py b/openpype/modules/sync_server/plugins/load/remove_site.py similarity index 100% rename from openpype/plugins/load/remove_site.py rename to openpype/modules/sync_server/plugins/load/remove_site.py diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 98065b68a0..1b7b2dc3a6 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -536,8 +536,8 @@ class SyncServerThread(threading.Thread): _site_is_working(self.module, project_name, remote_site, remote_site_config)]): self.log.debug( - "Some of the sites {} - {} is not working properly".format( - local_site, remote_site + "Some of the sites {} - {} in {} is not working properly".format( # noqa + local_site, remote_site, project_name ) ) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index b85b045bd9..67856f0d8e 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -15,7 +15,7 @@ from openpype.client import ( get_representations, get_representation_by_id, ) -from openpype.modules import OpenPypeModule, ITrayModule +from openpype.modules import OpenPypeModule, ITrayModule, IPluginPaths from openpype.settings import ( get_project_settings, get_system_settings, @@ -39,7 +39,7 @@ from .utils import time_function, SyncStatus, SiteAlreadyPresentError log = Logger.get_logger("SyncServer") -class SyncServerModule(OpenPypeModule, ITrayModule): +class SyncServerModule(OpenPypeModule, ITrayModule, IPluginPaths): """ Synchronization server that is syncing published files from local to any of implemented providers (like GDrive, S3 etc.) @@ -136,6 +136,13 @@ class SyncServerModule(OpenPypeModule, ITrayModule): # projects that long tasks are running on self.projects_processed = set() + def get_plugin_paths(self): + """Deadline plugin paths.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + return { + "load": [os.path.join(current_dir, "plugins", "load")] + } + """ Start of Public API """ def add_site(self, project_name, representation_id, site_name=None, force=False, priority=None, reset_timer=False): @@ -204,6 +211,58 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if remove_local_files: self._remove_local_file(project_name, representation_id, site_name) + def get_progress_for_repre(self, doc, active_site, remote_site): + """ + Calculates average progress for representation. 
+ If site has created_dt >> fully available >> progress == 1 + Could be calculated in aggregate if it would be too slow + Args: + doc(dict): representation dict + Returns: + (dict) with active and remote sites progress + {'studio': 1.0, 'gdrive': -1} - gdrive site is not present + -1 is used to highlight the site should be added + {'studio': 1.0, 'gdrive': 0.0} - gdrive site is present, not + uploaded yet + """ + progress = {active_site: -1, + remote_site: -1} + if not doc: + return progress + + files = {active_site: 0, remote_site: 0} + doc_files = doc.get("files") or [] + for doc_file in doc_files: + if not isinstance(doc_file, dict): + continue + + sites = doc_file.get("sites") or [] + for site in sites: + if ( + # Pype 2 compatibility + not isinstance(site, dict) + # Check if site name is one of progress sites + or site["name"] not in progress + ): + continue + + files[site["name"]] += 1 + norm_progress = max(progress[site["name"]], 0) + if site.get("created_dt"): + progress[site["name"]] = norm_progress + 1 + elif site.get("progress"): + progress[site["name"]] = norm_progress + site["progress"] + else: # site exists, might be failed, do not add again + progress[site["name"]] = 0 + + # for example 13 fully avail. files out of 26 >> 13/26 = 0.5 + avg_progress = {} + avg_progress[active_site] = \ + progress[active_site] / max(files[active_site], 1) + avg_progress[remote_site] = \ + progress[remote_site] / max(files[remote_site], 1) + return avg_progress + def compute_resource_sync_sites(self, project_name): """Get available resource sync sites state for publish process. diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index 30748206a3..029b5cc1ff 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -5,17 +5,19 @@ import platform import collections import numbers +import ayon_api import six import time +from openpype import AYON_SERVER_ENABLED from openpype.settings.lib import ( get_local_settings, ) from openpype.settings.constants import ( DEFAULT_PROJECT_KEY ) - from openpype.client import get_project +from openpype.lib import Logger, get_local_site_id from openpype.lib.path_templates import ( TemplateUnsolved, TemplateResult, @@ -23,7 +25,6 @@ from openpype.lib.path_templates import ( TemplatesDict, FormatObject, ) -from openpype.lib.log import Logger from openpype.modules import ModulesManager log = Logger.get_logger(__name__) @@ -475,6 +476,13 @@ class Anatomy(BaseAnatomy): Union[Dict[str, str], None]): Local root overrides. """ + if AYON_SERVER_ENABLED: + if not project_name: + return + return ayon_api.get_project_roots_for_site( + project_name, get_local_site_id() + ) + if local_settings is None: local_settings = get_local_settings() diff --git a/openpype/pipeline/legacy_io.py b/openpype/pipeline/legacy_io.py index bde2b24c2a..60fa035c22 100644 --- a/openpype/pipeline/legacy_io.py +++ b/openpype/pipeline/legacy_io.py @@ -4,6 +4,7 @@ import sys import logging import functools +from openpype import AYON_SERVER_ENABLED from . 
import schema from .mongodb import AvalonMongoDB, session_data_from_environment @@ -39,8 +40,9 @@ def install(): _connection_object.Session.update(session) _connection_object.install() - module._mongo_client = _connection_object.mongo_client - module._database = module.database = _connection_object.database + if not AYON_SERVER_ENABLED: + module._mongo_client = _connection_object.mongo_client + module._database = module.database = _connection_object.database module._is_installed = True diff --git a/openpype/pipeline/mongodb.py b/openpype/pipeline/mongodb.py index be2b67a5e7..41a44c7373 100644 --- a/openpype/pipeline/mongodb.py +++ b/openpype/pipeline/mongodb.py @@ -5,6 +5,7 @@ import logging import pymongo from uuid import uuid4 +from openpype import AYON_SERVER_ENABLED from openpype.client import OpenPypeMongoConnection from . import schema @@ -187,7 +188,8 @@ class AvalonMongoDB: return self._installed = True - self._database = self.mongo_client[str(os.environ["AVALON_DB"])] + if not AYON_SERVER_ENABLED: + self._database = self.mongo_client[str(os.environ["AVALON_DB"])] def uninstall(self): """Close any connection to the database""" diff --git a/openpype/pipeline/thumbnail.py b/openpype/pipeline/thumbnail.py index 39f3e17893..9d4a6f3e48 100644 --- a/openpype/pipeline/thumbnail.py +++ b/openpype/pipeline/thumbnail.py @@ -2,6 +2,7 @@ import os import copy import logging +from openpype import AYON_SERVER_ENABLED from openpype.client import get_project from . import legacy_io from .anatomy import Anatomy @@ -131,6 +132,32 @@ class BinaryThumbnail(ThumbnailResolver): return thumbnail_entity["data"].get("binary_data") +class ServerThumbnailResolver(ThumbnailResolver): + def process(self, thumbnail_entity, thumbnail_type): + if not AYON_SERVER_ENABLED: + return None + data = thumbnail_entity["data"] + entity_type = data.get("entity_type") + entity_id = data.get("entity_id") + if not entity_type or not entity_id: + return None + + from openpype.client.server.server_api import get_server_api_connection + + project_name = self.dbcon.active_project() + thumbnail_id = thumbnail_entity["_id"] + con = get_server_api_connection() + filepath = con.get_thumbnail( + project_name, entity_type, entity_id, thumbnail_id + ) + content = None + if filepath: + with open(filepath, "rb") as stream: + content = stream.read() + + return content + + # Thumbnail resolvers def discover_thumbnail_resolvers(): return discover(ThumbnailResolver) @@ -146,3 +173,4 @@ def register_thumbnail_resolver_path(path): register_thumbnail_resolver(TemplateResolver) register_thumbnail_resolver(BinaryThumbnail) +register_thumbnail_resolver(ServerThumbnailResolver) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index 493780645c..1d57545bc0 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -1,6 +1,7 @@ import collections from copy import deepcopy import pyblish.api +from openpype import AYON_SERVER_ENABLED from openpype.client import ( get_assets, get_archived_assets @@ -16,6 +17,9 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): families = ["clip", "shot"] def process(self, context): + if AYON_SERVER_ENABLED: + return + if "hierarchyContext" not in context.data: self.log.info("skipping IntegrateHierarchyToAvalon") return diff --git a/openpype/plugins/publish/extract_hierarchy_to_ayon.py b/openpype/plugins/publish/extract_hierarchy_to_ayon.py new file mode 100644 index 
0000000000..915650ae41 --- /dev/null +++ b/openpype/plugins/publish/extract_hierarchy_to_ayon.py @@ -0,0 +1,234 @@ +import collections +import copy +import json +import uuid +import pyblish.api + +from ayon_api import slugify_string +from ayon_api.entity_hub import EntityHub + +from openpype import AYON_SERVER_ENABLED + + +def _default_json_parse(value): + return str(value) + + +class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): + """Create entities in AYON based on collected data.""" + + order = pyblish.api.ExtractorOrder - 0.01 + label = "Extract Hierarchy To AYON" + families = ["clip", "shot"] + + def process(self, context): + if not AYON_SERVER_ENABLED: + return + + hierarchy_context = context.data.get("hierarchyContext") + if not hierarchy_context: + self.log.info("No hierarchy context found. Skipping") + return + + project_name = context.data["projectName"] + hierarchy_context = self._filter_hierarchy(context) + if not hierarchy_context: + self.log.info("All folders were filtered out") + return + + self.log.debug("Hierarchy_context: {}".format( + json.dumps(hierarchy_context, default=_default_json_parse) + )) + + entity_hub = EntityHub(project_name) + project = entity_hub.project_entity + + hierarchy_match_queue = collections.deque() + hierarchy_match_queue.append((project, hierarchy_context)) + while hierarchy_match_queue: + item = hierarchy_match_queue.popleft() + entity, entity_info = item + + # Update attributes of entities + for attr_name, attr_value in entity_info["attributes"].items(): + if attr_name in entity.attribs: + entity.attribs[attr_name] = attr_value + + # Check if info has any children to sync + children_info = entity_info["children"] + tasks_info = entity_info["tasks"] + if not tasks_info and not children_info: + continue + + # Prepare children by lowered name to easily find matching entities + children_by_low_name = { + child.name.lower(): child + for child in entity.children + } + + # Create tasks if they are not available + for task_info in tasks_info: + task_label = task_info["name"] + task_name = slugify_string(task_label) + if task_name == task_label: + task_label = None + task_entity = children_by_low_name.get(task_name.lower()) + # TODO propagate updates of tasks if there are any + # TODO check if existing entity has 'task' type + if task_entity is None: + task_entity = entity_hub.add_new_task( + task_info["type"], + parent_id=entity.id, + name=task_name + ) + + if task_label: + task_entity.label = task_label + + # Create/Update sub-folders + for child_info in children_info: + child_label = child_info["name"] + child_name = slugify_string(child_label) + if child_name == child_label: + child_label = None + # TODO check if existing entity has 'folder' type + child_entity = children_by_low_name.get(child_name.lower()) + if child_entity is None: + child_entity = entity_hub.add_new_folder( + child_info["entity_type"], + parent_id=entity.id, + name=child_name + ) + + if child_label: + child_entity.label = child_label + + # Add folder to queue + hierarchy_match_queue.append((child_entity, child_info)) + + entity_hub.commit_changes() + + def _filter_hierarchy(self, context): + """Filter hierarchy context by active folder names. + + Hierarchy context is filtered to folder names on active instances. + + Change hierarchy context to unified structure which suits logic in + entity creation.
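+ + The input is the v3 'hierarchyContext' shape, where children are stored + under 'childs' and attributes under 'custom_attributes'; both keys are + renamed during the filtering below.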
+ + Output example: + { + "name": "MyProject", + "entity_type": "Project", + "attributes": {}, + "tasks": [], + "children": [ + { + "name": "seq_01", + "entity_type": "Sequence", + "attributes": {}, + "tasks": [], + "children": [ + ... + ] + }, + ... + ] + } + + Todos: + Change how active folders are defined (names won't be enough in + AYON). + + Args: + context (pyblish.api.Context): Pyblish context. + + Returns: + dict[str, Any]: Hierarchy structure filtered by folder names. + """ + + # filter only the active publishing instances + active_folder_names = set() + for instance in context: + if instance.data.get("publish") is not False: + active_folder_names.add(instance.data.get("asset")) + + active_folder_names.discard(None) + + self.log.debug("Active folder names: {}".format(active_folder_names)) + if not active_folder_names: + return None + + project_item = None + project_children_context = None + for key, value in context.data["hierarchyContext"].items(): + project_item = copy.deepcopy(value) + project_children_context = project_item.pop("childs", None) + project_item["name"] = key + project_item["tasks"] = [] + project_item["attributes"] = project_item.pop( + "custom_attributes", {} + ) + project_item["children"] = [] + + if not project_children_context: + return None + + project_id = uuid.uuid4().hex + items_by_id = {project_id: project_item} + parent_id_by_item_id = {project_id: None} + valid_ids = set() + + hierarchy_queue = collections.deque() + hierarchy_queue.append((project_id, project_children_context)) + while hierarchy_queue: + queue_item = hierarchy_queue.popleft() + parent_id, children_context = queue_item + if not children_context: + continue + + for asset_name, asset_info in children_context.items(): + if ( + asset_name not in active_folder_names + and not asset_info.get("childs") + ): + continue + item_id = uuid.uuid4().hex + new_item = copy.deepcopy(asset_info) + new_item["name"] = asset_name + new_item["children"] = [] + new_children_context = new_item.pop("childs", None) + tasks = new_item.pop("tasks", {}) + task_items = [] + for task_name, task_info in tasks.items(): + task_info["name"] = task_name + task_items.append(task_info) + new_item["tasks"] = task_items + new_item["attributes"] = new_item.pop("custom_attributes", {}) + + items_by_id[item_id] = new_item + parent_id_by_item_id[item_id] = parent_id + + if asset_name in active_folder_names: + valid_ids.add(item_id) + hierarchy_queue.append((item_id, new_children_context)) + + if not valid_ids: + return None + + for item_id in set(valid_ids): + parent_id = parent_id_by_item_id[item_id] + while parent_id is not None and parent_id not in valid_ids: + valid_ids.add(parent_id) + parent_id = parent_id_by_item_id[parent_id] + + valid_ids.discard(project_id) + for item_id in valid_ids: + parent_id = parent_id_by_item_id[item_id] + item = items_by_id[item_id] + parent_item = items_by_id[parent_id] + parent_item["children"].append(item) + + if not project_item["children"]: + return None + return project_item diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index b71207c24f..b7feeac6a4 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -6,6 +6,7 @@ import shutil import pyblish.api +from openpype import AYON_SERVER_ENABLED from openpype.client import ( get_version_by_id, get_hero_version_by_subset_id, @@ -195,11 +196,20 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): entity_id
= None if old_version: entity_id = old_version["_id"] - new_hero_version = new_hero_version_doc( - src_version_entity["_id"], - src_version_entity["parent"], - entity_id=entity_id - ) + + if AYON_SERVER_ENABLED: + new_hero_version = new_hero_version_doc( + src_version_entity["parent"], + copy.deepcopy(src_version_entity["data"]), + src_version_entity["name"], + entity_id=entity_id + ) + else: + new_hero_version = new_hero_version_doc( + src_version_entity["_id"], + src_version_entity["parent"], + entity_id=entity_id + ) if old_version: self.log.debug("Replacing old hero version.") diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index 2e87d8fc86..9929d8f754 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -18,6 +18,7 @@ import collections import six import pyblish.api +from openpype import AYON_SERVER_ENABLED from openpype.client import get_versions from openpype.client.operations import OperationsSession, new_thumbnail_doc from openpype.pipeline.publish import get_publish_instance_label @@ -39,6 +40,10 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): ] def process(self, context): + if AYON_SERVER_ENABLED: + self.log.info("AYON is enabled. Skipping v3 thumbnail integration") + return + # Filter instances which can be used for integration filtered_instance_items = self._prepare_instances(context) if not filtered_instance_items: diff --git a/openpype/plugins/publish/integrate_thumbnail_ayon.py b/openpype/plugins/publish/integrate_thumbnail_ayon.py new file mode 100644 index 0000000000..ba5664c69f --- /dev/null +++ b/openpype/plugins/publish/integrate_thumbnail_ayon.py @@ -0,0 +1,207 @@ +""" Integrate thumbnails for use in AYON loaders. + + This thumbnail is different from the 'thumbnail' representation which could + be uploaded to Ftrack, or used as any other representation in Loaders to + pull into a scene. + + This one is used only as an image describing the content of a published + item and shows up only in the Loader's right column section. +""" + +import os +import collections + +import pyblish.api + +from openpype import AYON_SERVER_ENABLED +from openpype.client import get_versions +from openpype.client.operations import OperationsSession + +InstanceFilterResult = collections.namedtuple( + "InstanceFilterResult", + ["instance", "thumbnail_path", "version_id"] ) + + +class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin): + """Integrate thumbnails into AYON for use in loaders.""" + + label = "Integrate Thumbnails to AYON" + order = pyblish.api.IntegratorOrder + 0.01 + + required_context_keys = [ + "project", "asset", "task", "subset", "version" + ] + + def process(self, context): + if not AYON_SERVER_ENABLED: + self.log.info("AYON is not enabled. Skipping") + return + + # Filter instances which can be used for integration + filtered_instance_items = self._prepare_instances(context) + if not filtered_instance_items: + self.log.info( + "All instances were filtered. Thumbnail integration skipped."
+ ) + return + + project_name = context.data["projectName"] + + # Collect version ids from all filtered instances + version_ids = { + instance_items.version_id + for instance_items in filtered_instance_items + } + # Query versions + version_docs = get_versions( + project_name, + version_ids=version_ids, + hero=True, + fields=["_id", "type", "name"] + ) + # Store versions by their id (converted to string) + version_docs_by_str_id = { + str(version_doc["_id"]): version_doc + for version_doc in version_docs + } + self._integrate_thumbnails( + filtered_instance_items, + version_docs_by_str_id, + project_name + ) + + def _prepare_instances(self, context): + context_thumbnail_path = context.data.get("thumbnailPath") + valid_context_thumbnail = bool( + context_thumbnail_path + and os.path.exists(context_thumbnail_path) + ) + + filtered_instances = [] + for instance in context: + instance_label = self._get_instance_label(instance) + # Skip instances without published representations + # - there is no place where to put the thumbnail + published_repres = instance.data.get("published_representations") + if not published_repres: + self.log.debug(( + "There are no published representations" + " on the instance {}." + ).format(instance_label)) + continue + + # Find thumbnail path on instance + thumbnail_path = self._get_instance_thumbnail_path( + published_repres) + if thumbnail_path: + self.log.debug(( + "Found thumbnail path for instance \"{}\"." + " Thumbnail path: {}" + ).format(instance_label, thumbnail_path)) + + elif valid_context_thumbnail: + # Use context thumbnail path if it is available + thumbnail_path = context_thumbnail_path + self.log.debug(( + "Using context thumbnail path for instance \"{}\"." + " Thumbnail path: {}" + ).format(instance_label, thumbnail_path)) + + # Skip instance if thumbnail path is not available for it + if not thumbnail_path: + self.log.info(( + "Skipping thumbnail integration for instance \"{}\"." + " Instance and context" + " thumbnail paths are not available." + ).format(instance_label)) + continue + + version_id = str(self._get_version_id(published_repres)) + filtered_instances.append( + InstanceFilterResult(instance, thumbnail_path, version_id) + ) + return filtered_instances + + def _get_version_id(self, published_representations): + for repre_info in published_representations.values(): + return repre_info["representation"]["parent"] + + def _get_instance_thumbnail_path(self, published_representations): + thumb_repre_doc = None + for repre_info in published_representations.values(): + repre_doc = repre_info["representation"] + if repre_doc["name"].lower() == "thumbnail": + thumb_repre_doc = repre_doc + break + + if thumb_repre_doc is None: + self.log.debug( + "There is no representation with name \"thumbnail\"" + ) + return None + + path = thumb_repre_doc["data"]["path"] + if not os.path.exists(path): + self.log.warning( + "Thumbnail file cannot be found. Path: {}".format(path) + ) + return None + return os.path.normpath(path) + + def _integrate_thumbnails( + self, + filtered_instance_items, + version_docs_by_str_id, + project_name + ): + from openpype.client.server.operations import create_thumbnail + + op_session = OperationsSession() + + for instance_item in filtered_instance_items: + instance, thumbnail_path, version_id = instance_item + instance_label = self._get_instance_label(instance) + version_doc = version_docs_by_str_id.get(version_id) + if not version_doc: + self.log.warning(( + "Version entity for instance \"{}\" was not found."
+ ).format(instance_label)) + continue + + thumbnail_id = create_thumbnail(project_name, thumbnail_path) + + # Set thumbnail id for version + op_session.update_entity( + project_name, + version_doc["type"], + version_doc["_id"], + {"data.thumbnail_id": thumbnail_id} + ) + if version_doc["type"] == "hero_version": + version_name = "Hero" + else: + version_name = version_doc["name"] + self.log.debug("Setting thumbnail for version \"{}\" <{}>".format( + version_name, version_id + )) + + asset_entity = instance.data["assetEntity"] + op_session.update_entity( + project_name, + asset_entity["type"], + asset_entity["_id"], + {"data.thumbnail_id": thumbnail_id} + ) + self.log.debug("Setting thumbnail for asset \"{}\" <{}>".format( + asset_entity["name"], asset_entity["_id"] + )) + + op_session.commit() + + def _get_instance_label(self, instance): + return ( + instance.data.get("label") + or instance.data.get("name") + or "N/A" + ) diff --git a/openpype/resources/__init__.py b/openpype/resources/__init__.py index 0d7778e546..77cc0deaa2 100644 --- a/openpype/resources/__init__.py +++ b/openpype/resources/__init__.py @@ -1,4 +1,5 @@ import os +from openpype import AYON_SERVER_ENABLED from openpype.lib.openpype_version import is_running_staging RESOURCES_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -40,11 +41,17 @@ def get_liberation_font_path(bold=False, italic=False): def get_openpype_production_icon_filepath(): - return get_resource("icons", "openpype_icon.png") + filename = "openpype_icon.png" + if AYON_SERVER_ENABLED: + filename = "AYON_icon.png" + return get_resource("icons", filename) def get_openpype_staging_icon_filepath(): - return get_resource("icons", "openpype_icon_staging.png") + filename = "openpype_icon_staging.png" + if AYON_SERVER_ENABLED: + filename = "AYON_icon.png" + return get_resource("icons", filename) def get_openpype_icon_filepath(staging=None): @@ -60,7 +67,9 @@ def get_openpype_splash_filepath(staging=None): if staging is None: staging = is_running_staging() - if staging: + if AYON_SERVER_ENABLED: + splash_file_name = "AYON_splash.png" + elif staging: splash_file_name = "openpype_splash_staging.png" else: splash_file_name = "openpype_splash.png" diff --git a/openpype/resources/icons/AYON_icon.png b/openpype/resources/icons/AYON_icon.png new file mode 100644 index 0000000000..ed13aeea52 Binary files /dev/null and b/openpype/resources/icons/AYON_icon.png differ diff --git a/openpype/resources/icons/AYON_splash.png b/openpype/resources/icons/AYON_splash.png new file mode 100644 index 0000000000..734aefb740 Binary files /dev/null and b/openpype/resources/icons/AYON_splash.png differ diff --git a/openpype/settings/ayon_settings.py b/openpype/settings/ayon_settings.py new file mode 100644 index 0000000000..0402a7b5f3 --- /dev/null +++ b/openpype/settings/ayon_settings.py @@ -0,0 +1,1141 @@ +"""Helper functionality to convert AYON settings to OpenPype v3 settings. + +The settings are converted, so we can use v3 code with AYON settings. Once +the code of an addon is converted to a full AYON addon which expects AYON +settings, the conversion functions can be removed. + +The conversion is hardcoded -> there is no other way to achieve the result.
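+ +A minimal sketch of the top-level call (hypothetical minimal inputs for +illustration only; real default values come from OpenPype's bundled settings): + + >>> ayon = {"core": { + ... "studio_name": "Studio", + ... "studio_code": "stu", + ... "environments": "{}", + ... }} + >>> defaults = {"general": {}, "modules": {}} + >>> converted = convert_system_settings(ayon, defaults, {}) + >>> converted["general"]["studio_name"] + 'Studio'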
+ +Main entrypoints are functions: +- convert_project_settings - convert settings to project settings +- convert_system_settings - convert settings to system settings +# Both getters cache values +- get_ayon_project_settings - replacement for 'get_project_settings' +- get_ayon_system_settings - replacement for 'get_system_settings' +""" + +import json +import copy +import time + +import six +import ayon_api + + +def _convert_color(color_value): + if isinstance(color_value, six.string_types): + color_value = color_value.lstrip("#") + color_value_len = len(color_value) + _color_value = [] + for idx in range(color_value_len // 2): + _color_value.append(int(color_value[idx * 2:idx * 2 + 2], 16)) + for _ in range(4 - len(_color_value)): + _color_value.append(255) + return _color_value + + if isinstance(color_value, list): + # WARNING R,G,B can be 'int' or 'float' + # - 'float' variant is using 'int' for min: 0 and max: 1 + if len(color_value) == 3: + # Add alpha + color_value.append(255) + else: + # Convert float alpha to int + alpha = int(color_value[3] * 255) + if alpha > 255: + alpha = 255 + elif alpha < 0: + alpha = 0 + color_value[3] = alpha + return color_value + + +def _convert_host_imageio(host_settings): + if "imageio" not in host_settings: + return + + # --- imageio --- + ayon_imageio = host_settings["imageio"] + # TODO remove when fixed on server + if "ocio_config" in ayon_imageio["ocio_config"]: + ayon_imageio["ocio_config"]["filepath"] = ( + ayon_imageio["ocio_config"].pop("ocio_config") + ) + # Convert file rules + imageio_file_rules = ayon_imageio["file_rules"] + new_rules = {} + for rule in imageio_file_rules["rules"]: + name = rule.pop("name") + new_rules[name] = rule + imageio_file_rules["rules"] = new_rules + + +def _convert_applications_groups(groups, clear_metadata): + environment_key = "environment" + if isinstance(groups, dict): + new_groups = [] + for name, item in groups.items(): + item["name"] = name + new_groups.append(item) + groups = new_groups + + output = {} + group_dynamic_labels = {} + for group in groups: + group_name = group.pop("name") + if "label" in group: + group_dynamic_labels[group_name] = group["label"] + + tool_group_envs = group[environment_key] + if isinstance(tool_group_envs, six.string_types): + group[environment_key] = json.loads(tool_group_envs) + + variants = {} + variant_dynamic_labels = {} + for variant in group.pop("variants"): + variant_name = variant.pop("name") + variant_dynamic_labels[variant_name] = variant.pop("label") + variant_envs = variant[environment_key] + if isinstance(variant_envs, six.string_types): + variant[environment_key] = json.loads(variant_envs) + variants[variant_name] = variant + group["variants"] = variants + + if not clear_metadata: + variants["__dynamic_keys_labels__"] = variant_dynamic_labels + output[group_name] = group + + if not clear_metadata: + output["__dynamic_keys_labels__"] = group_dynamic_labels + return output + + +def _convert_applications(ayon_settings, output, clear_metadata): + # Addon settings + addon_settings = ayon_settings["applications"] + + # Applications settings + ayon_apps = addon_settings["applications"] + additional_apps = ayon_apps.pop("additional_apps") + applications = _convert_applications_groups( + ayon_apps, clear_metadata + ) + applications["additional_apps"] = _convert_applications_groups( + additional_apps, clear_metadata + ) + + # Tools settings + tools = _convert_applications_groups( + addon_settings["tool_groups"], clear_metadata + ) + + output["applications"] = applications +
output["tools"] = {"tool_groups": tools} + + +def _convert_general(ayon_settings, output): + # TODO get studio name/code + core_settings = ayon_settings["core"] + environments = core_settings["environments"] + if isinstance(environments, six.string_types): + environments = json.loads(environments) + + output["general"].update({ + "log_to_server": False, + "studio_name": core_settings["studio_name"], + "studio_code": core_settings["studio_code"], + "environments": environments + }) + + +def _convert_kitsu_system_settings(ayon_settings, output): + kitsu_settings = output["modules"]["kitsu"] + kitsu_settings["server"] = ayon_settings["kitsu"]["server"] + + +def _convert_ftrack_system_settings(ayon_settings, output): + # TODO implement and convert rest of ftrack settings + ftrack_settings = output["modules"]["ftrack"] + ayon_ftrack = ayon_settings["ftrack"] + ftrack_settings["ftrack_server"] = ayon_ftrack["ftrack_server"] + + +def _convert_shotgrid_system_settings(ayon_settings, output): + ayon_shotgrid = ayon_settings["shotgrid"] + # Skip conversion if different ayon addon is used + if "leecher_manager_url" not in ayon_shotgrid: + return + + shotgrid_settings = output["modules"]["shotgrid"] + for key in ( + "leecher_manager_url", + "leecher_backend_url", + "filter_projects_by_login", + ): + shotgrid_settings[key] = ayon_shotgrid[key] + + new_items = {} + for item in ayon_shotgrid["shotgrid_settings"]: + name = item.pop("name") + new_items[name] = item + shotgrid_settings["shotgrid_settings"] = new_items + + +def _convert_timers_manager(ayon_settings, output): + manager_settings = output["modules"]["timers_manager"] + ayon_manager = ayon_settings["timers_manager"] + for key in { + "auto_stop", "full_time", "message_time", "disregard_publishing" + }: + manager_settings[key] = ayon_manager[key] + + +def _convert_clockify(ayon_settings, output): + clockify_settings = output["modules"]["clockify"] + ayon_clockify = ayon_settings["clockify"] + for key in { + "worskpace_name", + }: + clockify_settings[key] = ayon_clockify[key] + + +def _convert_deadline(ayon_settings, output): + deadline_settings = output["modules"]["deadline"] + ayon_deadline = ayon_settings["deadline"] + deadline_urls = {} + for item in ayon_deadline["deadline_urls"]: + deadline_urls[item["name"]] = item["value"] + deadline_settings["deadline_urls"] = deadline_urls + + +def _convert_muster(ayon_settings, output): + muster_settings = output["modules"]["muster"] + ayon_muster = ayon_settings["muster"] + templates_mapping = {} + for item in ayon_muster["templates_mapping"]: + templates_mapping[item["name"]] = item["value"] + muster_settings["templates_mapping"] = templates_mapping + muster_settings["MUSTER_REST_URL"] = ayon_muster["MUSTER_REST_URL"] + + +def _convert_royalrender(ayon_settings, output): + royalrender_settings = output["modules"]["royalrender"] + ayon_royalrender = ayon_settings["royalrender"] + royalrender_settings["rr_paths"] = { + item["name"]: item["value"] + for item in ayon_royalrender["rr_paths"] + } + + +def _convert_modules(ayon_settings, output, addon_versions): + # TODO add all modules + # TODO add 'enabled' values + for key, func in ( + ("kitsu", _convert_kitsu_system_settings), + ("ftrack", _convert_ftrack_system_settings), + ("shotgrid", _convert_shotgrid_system_settings), + ("timers_manager", _convert_timers_manager), + ("clockify", _convert_clockify), + ("deadline", _convert_deadline), + ("muster", _convert_muster), + ("royalrender", _convert_royalrender), + ): + if key in ayon_settings: + 
func(ayon_settings, output) + + for module_name, value in output["modules"].items(): + if "enabled" not in value: + continue + value["enabled"] = module_name in addon_versions + + # Missing modules conversions + # - "sync_server" -> renamed to sitesync + # - "slack" -> only 'enabled' + # - "job_queue" -> completely missing in ayon + + +def convert_system_settings(ayon_settings, default_settings, addon_versions): + output = copy.deepcopy(default_settings) + if "applications" in ayon_settings: + _convert_applications(ayon_settings, output, False) + + if "core" in ayon_settings: + _convert_general(ayon_settings, output) + + _convert_modules(ayon_settings, output, addon_versions) + return output + + +# --------- Project settings --------- +def _convert_blender_project_settings(ayon_settings, output): + if "blender" not in ayon_settings: + return + ayon_blender = ayon_settings["blender"] + blender_settings = output["blender"] + _convert_host_imageio(ayon_blender) + + ayon_workfile_build = ayon_blender["workfile_builder"] + blender_workfile_build = blender_settings["workfile_builder"] + for key in ("create_first_version", "custom_templates"): + blender_workfile_build[key] = ayon_workfile_build[key] + + ayon_publish = ayon_blender["publish"] + model_validators = ayon_publish.pop("model_validators", None) + if model_validators is not None: + for src_key, dst_key in ( + ("validate_mesh_has_uvs", "ValidateMeshHasUvs"), + ("validate_mesh_no_negative_scale", "ValidateMeshNoNegativeScale"), + ("validate_transform_zero", "ValidateTransformZero"), + ): + ayon_publish[dst_key] = model_validators.pop(src_key) + + blender_publish = blender_settings["publish"] + for key in tuple(ayon_publish.keys()): + blender_publish[key] = ayon_publish[key] + + +def _convert_celaction_project_settings(ayon_settings, output): + if "celaction" not in ayon_settings: + return + ayon_celaction = ayon_settings["celaction"] + ayon_celaction_publish = ayon_celaction["publish"] + celaction_publish_settings = output["celaction"]["publish"] + + _convert_host_imageio(ayon_celaction) + if "imageio" in ayon_celaction: + output["celaction"]["imageio"] = ayon_celaction["imageio"] + + for plugin_name in tuple(celaction_publish_settings.keys()): + if plugin_name in ayon_celaction_publish: + celaction_publish_settings[plugin_name] = ( + ayon_celaction_publish[plugin_name] + ) + + +def _convert_flame_project_settings(ayon_settings, output): + if "flame" not in ayon_settings: + return + + ayon_flame = ayon_settings["flame"] + flame_settings = output["flame"] + flame_settings["create"] = ayon_flame["create"] + + ayon_load_flame = ayon_flame["load"] + load_flame_settings = flame_settings["load"] + # Wrong settings model on server side + for src_key, dst_key in ( + ("load_clip", "LoadClip"), + ("load_clip_batch", "LoadClipBatch"), + ): + if src_key in ayon_load_flame: + ayon_load_flame[dst_key] = ayon_load_flame.pop(src_key) + + for plugin_name in tuple(load_flame_settings.keys()): + if plugin_name in ayon_load_flame: + load_flame_settings[plugin_name] = ayon_load_flame[plugin_name] + + ayon_publish_flame = ayon_flame["publish"] + flame_publish_settings = flame_settings["publish"] + # 'ExtractSubsetResources' changed model of 'export_presets_mapping' + # - some keys were moved under 'other_parameters' + ayon_subset_resources = ayon_publish_flame["ExtractSubsetResources"] + new_subset_resources = {} + for item in ayon_subset_resources.pop("export_presets_mapping"): + name = item.pop("name") + if "other_parameters" in item: + other_parameters = item.pop("other_parameters") + item.update(other_parameters) +
new_subset_resources[name] = item + + ayon_subset_resources["export_presets_mapping"] = new_subset_resources + for plugin_name in tuple(flame_publish_settings.keys()): + if plugin_name in ayon_publish_flame: + flame_publish_settings[plugin_name] = ( + ayon_publish_flame[plugin_name] + ) + + # 'imageio' changed model + # - missing subkey 'project' which is in root of 'imageio' model + _convert_host_imageio(ayon_flame) + ayon_imageio_flame = ayon_flame["imageio"] + if "project" not in ayon_imageio_flame: + profile_mapping = ayon_imageio_flame.pop("profilesMapping") + ayon_imageio_flame = { + "project": ayon_imageio_flame, + "profilesMapping": profile_mapping + } + flame_settings["imageio"] = ayon_imageio_flame + + +def _convert_fusion_project_settings(ayon_settings, output): + if "fusion" not in ayon_settings: + return + ayon_fusion = ayon_settings["fusion"] + ayon_imageio_fusion = ayon_fusion["imageio"] + + if "ocioSettings" in ayon_imageio_fusion: + ayon_ocio_setting = ayon_imageio_fusion.pop("ocioSettings") + paths = ayon_ocio_setting.pop("ocioPathModel") + for key, value in tuple(paths.items()): + new_value = [] + if value: + new_value.append(value) + paths[key] = new_value + + ayon_ocio_setting["configFilePath"] = paths + ayon_imageio_fusion["ocio"] = ayon_ocio_setting + + _convert_host_imageio(ayon_fusion) + + imageio_fusion_settings = output["fusion"]["imageio"] + for key in ( + "imageio", + ): + imageio_fusion_settings[key] = ayon_fusion[key] + + +def _convert_maya_project_settings(ayon_settings, output): + if "maya" not in ayon_settings: + return + + ayon_maya = ayon_settings["maya"] + openpype_maya = output["maya"] + + # Change key of render settings + ayon_maya["RenderSettings"] = ayon_maya.pop("render_settings") + + # Convert extensions mapping + ayon_maya["ext_mapping"] = { + item["name"]: item["value"] + for item in ayon_maya["ext_mapping"] + } + + # Publish UI filters + new_filters = {} + for item in ayon_maya["filters"]: + new_filters[item["name"]] = { + subitem["name"]: subitem["value"] + for subitem in item["value"] + } + ayon_maya["filters"] = new_filters + + # Maya dirmap + ayon_maya_dirmap = ayon_maya.pop("maya_dirmap") + ayon_maya_dirmap_path = ayon_maya_dirmap["paths"] + ayon_maya_dirmap_path["source-path"] = ( + ayon_maya_dirmap_path.pop("source_path") + ) + ayon_maya_dirmap_path["destination-path"] = ( + ayon_maya_dirmap_path.pop("destination_path") + ) + ayon_maya["maya-dirmap"] = ayon_maya_dirmap + + # Create plugins + ayon_create = ayon_maya["create"] + ayon_create_static_mesh = ayon_create["CreateUnrealStaticMesh"] + if "static_mesh_prefixes" in ayon_create_static_mesh: + ayon_create_static_mesh["static_mesh_prefix"] = ( + ayon_create_static_mesh.pop("static_mesh_prefixes") + ) + + # --- Publish (START) --- + ayon_publish = ayon_maya["publish"] + try: + attributes = json.loads( + ayon_publish["ValidateAttributes"]["attributes"] + ) + except ValueError: + attributes = {} + ayon_publish["ValidateAttributes"]["attributes"] = attributes + + try: + SUFFIX_NAMING_TABLE = json.loads( + ayon_publish + ["ValidateTransformNamingSuffix"] + ["SUFFIX_NAMING_TABLE"] + ) + except ValueError: + SUFFIX_NAMING_TABLE = {} + ayon_publish["ValidateTransformNamingSuffix"]["SUFFIX_NAMING_TABLE"] = ( + SUFFIX_NAMING_TABLE + ) + + # Convert render attribute validations + validate_render_settings = ayon_publish["ValidateRenderSettings"] + for key in ( + "arnold_render_attributes", + "vray_render_attributes", + "redshift_render_attributes", + "renderman_render_attributes", + ): + if key not in validate_render_settings: + continue + validate_render_settings[key] = [ + [item["type"], item["value"]] + for item in validate_render_settings[key] + ] + + # Extract playblast capture settings + ayon_capture_preset = ayon_publish["ExtractPlayblast"]["capture_preset"] + display_options = ayon_capture_preset["DisplayOptions"] + for key in ("background", "backgroundBottom", "backgroundTop"): + display_options[key] = _convert_color(display_options[key]) + + for src_key, dst_key in ( + ("DisplayOptions", "Display Options"), + ("ViewportOptions", "Viewport Options"), + ("CameraOptions", "Camera Options"), + ): + ayon_capture_preset[dst_key] = ayon_capture_preset.pop(src_key) + + # Extract Camera Alembic bake attributes + try: + bake_attributes = json.loads( + ayon_publish["ExtractCameraAlembic"]["bake_attributes"] + ) + except ValueError: + bake_attributes = [] + ayon_publish["ExtractCameraAlembic"]["bake_attributes"] = bake_attributes + + # --- Publish (END) --- + for renderer_settings in ayon_maya["RenderSettings"].values(): + if ( + not isinstance(renderer_settings, dict) + or "additional_options" not in renderer_settings + ): + continue + renderer_settings["additional_options"] = [ + [item["attribute"], item["value"]] + for item in renderer_settings["additional_options"] + ] + + _convert_host_imageio(ayon_maya) + + same_keys = { + "imageio", + "scriptsmenu", + "templated_workfile_build", + "load", + "create", + "publish", + "mel_workspace", + "ext_mapping", + "workfile_build", + "filters", + "maya-dirmap", + "RenderSettings", + } + for key in same_keys: + openpype_maya[key] = ayon_maya[key] + + +def _convert_nuke_knobs(knobs): + new_knobs = [] + for knob in knobs: + knob_type = knob["type"] + value = knob[knob_type] + + if knob_type == "boolean": + knob_type = "bool" + + new_knob = { + "type": knob_type, + "name": knob["name"], + } + new_knobs.append(new_knob) + + if knob_type == "formatable": + new_knob["template"] = value["template"] + new_knob["to_type"] = value["to_type"] + continue + + value_key = "value" + if knob_type == "expression": + value_key = "expression" + + elif knob_type == "color_gui": + value = _convert_color(value) + + elif knob_type == "vector_2d": + value = [value["x"], value["y"]] + + elif knob_type == "vector_3d": + value = [value["x"], value["y"], value["z"]] + + new_knob[value_key] = value + return new_knobs + + +def _convert_nuke_project_settings(ayon_settings, output): + if "nuke" not in ayon_settings: + return + + ayon_nuke = ayon_settings["nuke"] + openpype_nuke = output["nuke"] + + # --- Dirmap --- + dirmap = ayon_nuke.pop("dirmap") + for src_key, dst_key in ( + ("source_path", "source-path"), + ("destination_path", "destination-path"), + ): + dirmap["paths"][dst_key] = dirmap["paths"].pop(src_key) + ayon_nuke["nuke-dirmap"] = dirmap + + # --- Filters --- + new_gui_filters = {} + for item in ayon_nuke.pop("filters"): + subvalue = {} + key = item["name"] + for subitem in item["value"]: + subvalue[subitem["name"]] = subitem["value"] + new_gui_filters[key] = subvalue + ayon_nuke["filters"] = new_gui_filters + + # --- Load --- + ayon_load = ayon_nuke["load"] + ayon_load["LoadClip"]["_representations"] = ( + ayon_load["LoadClip"].pop("representations_include") + ) + ayon_load["LoadImage"]["_representations"] = ( + ayon_load["LoadImage"].pop("representations_include") + ) + + # --- Create --- + ayon_create = ayon_nuke["create"] + for creator_name in ( + "CreateWritePrerender", + "CreateWriteImage", + "CreateWriteRender", + ): + new_prenodes = {} + for prenode in
ayon_create[creator_name]["prenodes"]: + name = prenode.pop("name") + prenode["knobs"] = _convert_nuke_knobs(prenode["knobs"]) + new_prenodes[name] = prenode + + ayon_create[creator_name]["prenodes"] = new_prenodes + + # --- Publish --- + ayon_publish = ayon_nuke["publish"] + slate_mapping = ayon_publish["ExtractSlateFrame"]["key_value_mapping"] + for key in tuple(slate_mapping.keys()): + value = slate_mapping[key] + slate_mapping[key] = [value["enabled"], value["template"]] + + ayon_publish["ValidateKnobs"]["knobs"] = json.loads( + ayon_publish["ValidateKnobs"]["knobs"] + ) + + new_review_data_outputs = {} + for item in ayon_publish["ExtractReviewDataMov"]["outputs"]: + name = item.pop("name") + item["reformat_node_config"] = _convert_nuke_knobs( + item["reformat_node_config"]) + new_review_data_outputs[name] = item + ayon_publish["ExtractReviewDataMov"]["outputs"] = new_review_data_outputs + + # TODO 'ExtractThumbnail' does not have ideal schema in v3 + new_thumbnail_nodes = {} + for item in ayon_publish["ExtractThumbnail"]["nodes"]: + name = item["nodeclass"] + value = [] + for knob in _convert_nuke_knobs(item["knobs"]): + knob_name = knob["name"] + # NOTE This may crash on 'formatable' knobs which have no value + if knob["type"] == "expression": + knob_value = knob["expression"] + else: + knob_value = knob["value"] + value.append([knob_name, knob_value]) + new_thumbnail_nodes[name] = value + + ayon_publish["ExtractThumbnail"]["nodes"] = new_thumbnail_nodes + + # --- ImageIO --- + # NOTE 'monitorOutLut' is maybe not yet in v3 (it should be) + _convert_host_imageio(ayon_nuke) + ayon_imageio = ayon_nuke["imageio"] + for item in ayon_imageio["nodes"]["requiredNodes"]: + item["knobs"] = _convert_nuke_knobs(item["knobs"]) + for item in ayon_imageio["nodes"]["overrideNodes"]: + item["knobs"] = _convert_nuke_knobs(item["knobs"]) + + # Store converted values to openpype values + for key in ( + "scriptsmenu", + "nuke-dirmap", + "filters", + "load", + "create", + "publish", + "workfile_builder", + "imageio", + ): + openpype_nuke[key] = ayon_nuke[key] + + +def _convert_hiero_project_settings(ayon_settings, output): + if "hiero" not in ayon_settings: + return + + ayon_hiero = ayon_settings["hiero"] + openpype_hiero = output["hiero"] + + new_gui_filters = {} + for item in ayon_hiero.pop("filters"): + subvalue = {} + key = item["name"] + for subitem in item["value"]: + subvalue[subitem["name"]] = subitem["value"] + new_gui_filters[key] = subvalue + ayon_hiero["filters"] = new_gui_filters + + _convert_host_imageio(ayon_hiero) + + for key in ( + "create", + "filters", + "imageio", + "load", + "publish", + "scriptsmenu", + ): + openpype_hiero[key] = ayon_hiero[key] + + +def _convert_photoshop_project_settings(ayon_settings, output): + if "photoshop" not in ayon_settings: + return + + ayon_photoshop = ayon_settings["photoshop"] + photoshop_settings = output["photoshop"] + collect_review = ayon_photoshop["publish"]["CollectReview"] + if "active" in collect_review: + collect_review["publish"] = collect_review.pop("active") + + _convert_host_imageio(ayon_photoshop) + + for key in ( + "create", + "publish", + "workfile_builder", + "imageio", + ): + photoshop_settings[key] = ayon_photoshop[key] + + +def _convert_tvpaint_project_settings(ayon_settings, output): + if "tvpaint" not in ayon_settings: + return + ayon_tvpaint = ayon_settings["tvpaint"] + tvpaint_settings = output["tvpaint"] + + _convert_host_imageio(ayon_tvpaint) + + for key in ( + "stop_timer_on_application_exit", + "load", + "workfile_builder", + "imageio", + ): + tvpaint_settings[key] =
ayon_tvpaint[key] + + filters = {} + for item in ayon_tvpaint["filters"]: + value = item["value"] + try: + value = json.loads(value) + + except ValueError: + value = {} + filters[item["name"]] = value + tvpaint_settings["filters"] = filters + + ayon_publish_settings = ayon_tvpaint["publish"] + tvpaint_publish_settings = tvpaint_settings["publish"] + for plugin_name in ("CollectRenderScene", "ExtractConvertToEXR"): + tvpaint_publish_settings[plugin_name] = ( + ayon_publish_settings[plugin_name] + ) + + for plugin_name in ( + "ValidateProjectSettings", + "ValidateMarks", + "ValidateStartFrame", + "ValidateAssetName", + ): + ayon_value = ayon_publish_settings[plugin_name] + tvpaint_value = tvpaint_publish_settings[plugin_name] + for src_key, dst_key in ( + ("action_enabled", "optional"), + ("action_enable", "active"), + ): + if src_key in ayon_value: + tvpaint_value[dst_key] = ayon_value[src_key] + + review_color = ayon_publish_settings["ExtractSequence"]["review_bg"] + tvpaint_publish_settings["ExtractSequence"]["review_bg"] = _convert_color( + review_color + ) + + +def _convert_traypublisher_project_settings(ayon_settings, output): + if "traypublisher" not in ayon_settings: + return + + ayon_traypublisher = ayon_settings["traypublisher"] + traypublisher_settings = output["traypublisher"] + + _convert_host_imageio(ayon_traypublisher) + traypublisher_settings["imageio"] = ayon_traypublisher["imageio"] + + ayon_editorial_simple = ( + ayon_traypublisher["editorial_creators"]["editorial_simple"] + ) + if "shot_metadata_creator" in ayon_editorial_simple: + shot_metadata_creator = ayon_editorial_simple.pop( + "shot_metadata_creator" + ) + if isinstance(shot_metadata_creator["clip_name_tokenizer"], dict): + shot_metadata_creator["clip_name_tokenizer"] = [ + {"name": "_sequence_", "regex": "(sc\\d{3})"}, + {"name": "_shot_", "regex": "(sh\\d{3})"}, + ] + ayon_editorial_simple.update(shot_metadata_creator) + + ayon_editorial_simple["clip_name_tokenizer"] = { + item["name"]: item["regex"] + for item in ayon_editorial_simple["clip_name_tokenizer"] + } + + if "shot_subset_creator" in ayon_editorial_simple: + ayon_editorial_simple.update( + ayon_editorial_simple.pop("shot_subset_creator")) + for item in ayon_editorial_simple["shot_hierarchy"]["parents"]: + item["type"] = item.pop("parent_type") + + shot_add_tasks = ayon_editorial_simple["shot_add_tasks"] + if isinstance(shot_add_tasks, dict): + shot_add_tasks = [] + new_shot_add_tasks = { + item["name"]: item["task_type"] + for item in shot_add_tasks + } + ayon_editorial_simple["shot_add_tasks"] = new_shot_add_tasks + + traypublisher_settings["editorial_creators"][ + "editorial_simple" + ] = ayon_editorial_simple + + +def _convert_webpublisher_project_settings(ayon_settings, output): + if "webpublisher" not in ayon_settings: + return + + ayon_webpublisher = ayon_settings["webpublisher"] + _convert_host_imageio(ayon_webpublisher) + + ayon_publish = ayon_webpublisher["publish"] + + ayon_collect_files = ayon_publish["CollectPublishedFiles"] + ayon_collect_files["task_type_to_family"] = { + item["name"]: item["value"] + for item in ayon_collect_files["task_type_to_family"] + } + output["webpublisher"]["publish"] = ayon_publish + output["webpublisher"]["imageio"] = ayon_webpublisher["imageio"] + + +def _convert_deadline_project_settings(ayon_settings, output): + if "deadline" not in ayon_settings: + return + + ayon_deadline = ayon_settings["deadline"] + deadline_settings = output["deadline"] + + for key in ("deadline_urls",): + ayon_deadline.pop(key) + + 
ayon_deadline_publish = ayon_deadline["publish"] + + maya_submit = ayon_deadline_publish["MayaSubmitDeadline"] + for json_key in ("jobInfo", "pluginInfo"): + src_text = maya_submit.pop(json_key) + try: + value = json.loads(src_text) + except ValueError: + value = {} + maya_submit[json_key] = value + + nuke_submit = ayon_deadline_publish["NukeSubmitDeadline"] + nuke_submit["env_search_replace_values"] = { + item["name"]: item["value"] + for item in nuke_submit.pop("env_search_replace_values") + } + nuke_submit["limit_groups"] = { + item["name"]: item["value"] for item in nuke_submit.pop("limit_groups") + } + + process_submitted_job = ayon_deadline_publish["ProcessSubmittedJobOnFarm"] + process_submitted_job["aov_filter"] = { + item["name"]: item["value"] + for item in process_submitted_job.pop("aov_filter") + } + deadline_publish_settings = deadline_settings["publish"] + for key in tuple(deadline_publish_settings.keys()): + if key in ayon_deadline_publish: + deadline_publish_settings[key] = ayon_deadline_publish[key] + + +def _convert_kitsu_project_settings(ayon_settings, output): + if "kitsu" not in ayon_settings: + return + + ayon_kitsu = ayon_settings["kitsu"] + kitsu_settings = output["kitsu"] + for key in tuple(kitsu_settings.keys()): + if key in ayon_kitsu: + kitsu_settings[key] = ayon_kitsu[key] + + +def _convert_shotgrid_project_settings(ayon_settings, output): + if "shotgrid" not in ayon_settings: + return + + ayon_shotgrid = ayon_settings["shotgrid"] + for key in { + "leecher_backend_url", + "filter_projects_by_login", + "shotgrid_settings", + "leecher_manager_url", + }: + ayon_shotgrid.pop(key) + + asset_field = ayon_shotgrid["fields"]["asset"] + asset_field["type"] = asset_field.pop("asset_type") + + task_field = ayon_shotgrid["fields"]["task"] + if "task" in task_field: + task_field["step"] = task_field.pop("task") + + shotgrid_settings = output["shotgrid"] + for key in tuple(shotgrid_settings.keys()): + if key in ayon_shotgrid: + shotgrid_settings[key] = ayon_shotgrid[key] + + +def _convert_slack_project_settings(ayon_settings, output): + if "slack" not in ayon_settings: + return + + ayon_slack = ayon_settings["slack"] + slack_settings = output["slack"] + ayon_slack.pop("enabled", None) + for profile in ayon_slack["publish"]["CollectSlackFamilies"]["profiles"]: + profile["tasks"] = profile.pop("task_names") + profile["subsets"] = profile.pop("subset_names") + + for key in tuple(slack_settings.keys()): + if key in ayon_slack: + slack_settings[key] = ayon_slack[key] + + +def _convert_global_project_settings(ayon_settings, output): + if "core" not in ayon_settings: + return + + ayon_core = ayon_settings["core"] + global_settings = output["global"] + + # Publish conversion + ayon_publish = ayon_core["publish"] + for profile in ayon_publish["ExtractReview"]["profiles"]: + new_outputs = {} + for output_def in profile.pop("outputs"): + name = output_def.pop("name") + new_outputs[name] = output_def + + for color_key in ("overscan_color", "bg_color"): + output_def[color_key] = _convert_color(output_def[color_key]) + + letter_box = output_def["letter_box"] + for color_key in ("fill_color", "line_color"): + letter_box[color_key] = _convert_color(letter_box[color_key]) + + if "output_width" in output_def: + output_def["width"] = output_def.pop("output_width") + + if "output_height"
in output_def: + output_def["height"] = output_def.pop("output_height") + + profile["outputs"] = new_outputs + + extract_burnin = ayon_publish["ExtractBurnin"] + extract_burnin_options = extract_burnin["options"] + for color_key in ("font_color", "bg_color"): + extract_burnin_options[color_key] = _convert_color( + extract_burnin_options[color_key] + ) + + for profile in extract_burnin["profiles"]: + extract_burnin_defs = profile["burnins"] + profile["burnins"] = { + extract_burnin_def.pop("name"): extract_burnin_def + for extract_burnin_def in extract_burnin_defs + } + + global_publish = global_settings["publish"] + ayon_integrate_hero = ayon_publish["IntegrateHeroVersion"] + global_integrate_hero = global_publish["IntegrateHeroVersion"] + for key, value in global_integrate_hero.items(): + if key not in ayon_integrate_hero: + ayon_integrate_hero[key] = value + + ayon_cleanup = ayon_publish["CleanUp"] + if "patterns" in ayon_cleanup: + ayon_cleanup["paterns"] = ayon_cleanup.pop("patterns") + + for key in tuple(global_publish.keys()): + if key in ayon_publish: + global_publish[key] = ayon_publish[key] + + # Project root settings + for json_key in ("project_folder_structure", "project_environments"): + try: + value = json.loads(ayon_core[json_key]) + except ValueError: + value = {} + global_publish[json_key] = value + + # Tools settings + ayon_tools = ayon_core["tools"] + global_tools = global_settings["tools"] + ayon_create_tool = ayon_tools["creator"] + new_smart_select_families = { + item["name"]: item["task_names"] + for item in ayon_create_tool["families_smart_select"] + } + ayon_create_tool["families_smart_select"] = new_smart_select_families + global_tools["creator"] = ayon_create_tool + + ayon_loader_tool = ayon_tools["loader"] + for profile in ayon_loader_tool["family_filter_profiles"]: + if "template_publish_families" in profile: + profile["filter_families"] = ( + profile.pop("template_publish_families") + ) + global_tools["loader"] = ayon_loader_tool + + global_tools["publish"] = ayon_tools["publish"] + + +def convert_project_settings(ayon_settings, default_settings): + # Missing settings + # - standalonepublisher + output = copy.deepcopy(default_settings) + exact_match = { + "aftereffects", + "harmony", + "houdini", + "resolve", + "unreal", + } + for key in exact_match: + if key in ayon_settings: + output[key] = ayon_settings[key] + + _convert_blender_project_settings(ayon_settings, output) + _convert_celaction_project_settings(ayon_settings, output) + _convert_flame_project_settings(ayon_settings, output) + _convert_fusion_project_settings(ayon_settings, output) + _convert_maya_project_settings(ayon_settings, output) + _convert_nuke_project_settings(ayon_settings, output) + _convert_hiero_project_settings(ayon_settings, output) + _convert_photoshop_project_settings(ayon_settings, output) + _convert_tvpaint_project_settings(ayon_settings, output) + _convert_traypublisher_project_settings(ayon_settings, output) + _convert_webpublisher_project_settings(ayon_settings, output) + + _convert_deadline_project_settings(ayon_settings, output) + _convert_kitsu_project_settings(ayon_settings, output) + _convert_shotgrid_project_settings(ayon_settings, output) + _convert_slack_project_settings(ayon_settings, output) + + _convert_global_project_settings(ayon_settings, output) + + return output + + +class CacheItem: + lifetime = 10 + + def __init__(self, value): + self._value = value + self._outdate_time = time.time() + self.lifetime + + def get_value(self): + return copy.deepcopy(self._value) 
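+ + # Cached values are refreshed lazily: callers are expected to check + # 'is_outdated' and push fresh data via 'update_value' + # (see AyonSettingsCache below).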
+ + def update_value(self, value): + self._value = value + self._outdate_time = time.time() + self.lifetime + + @property + def is_outdated(self): + return time.time() > self._outdate_time + + +class AyonSettingsCache: + _cache_by_project_name = {} + _production_settings = None + + @classmethod + def get_production_settings(cls): + if ( + cls._production_settings is None + or cls._production_settings.is_outdated + ): + value = ayon_api.get_addons_settings(only_values=False) + if cls._production_settings is None: + cls._production_settings = CacheItem(value) + else: + cls._production_settings.update_value(value) + return cls._production_settings.get_value() + + @classmethod + def get_value_by_project(cls, project_name): + production_settings = cls.get_production_settings() + addon_versions = production_settings["versions"] + if project_name is None: + return production_settings["settings"], addon_versions + + cache_item = cls._cache_by_project_name.get(project_name) + if cache_item is None or cache_item.is_outdated: + value = ayon_api.get_addons_settings(project_name) + if cache_item is None: + cache_item = CacheItem(value) + cls._cache_by_project_name[project_name] = cache_item + else: + cache_item.update_value(value) + + return cache_item.get_value(), addon_versions + + +def get_ayon_project_settings(default_values, project_name): + ayon_settings, addon_versions = ( + AyonSettingsCache.get_value_by_project(project_name) + ) + return convert_project_settings(ayon_settings, default_values) + + +def get_ayon_system_settings(default_values): + ayon_settings, addon_versions = ( + AyonSettingsCache.get_value_by_project(None) + ) + return convert_system_settings( + ayon_settings, default_values, addon_versions + ) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index a1f3331ccc..ab7cdd058c 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -7,10 +7,14 @@ from abc import ABCMeta, abstractmethod import six import openpype.version -from openpype.client.mongo import OpenPypeMongoConnection -from openpype.client.entities import get_project_connection, get_project +from openpype.client.mongo import ( + OpenPypeMongoConnection, + get_project_connection, +) +from openpype.client.entities import get_project from openpype.lib.pype_info import get_workstation_info + from .constants import ( GLOBAL_SETTINGS_KEY, SYSTEM_SETTINGS_KEY, diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 73554df236..ce62dde43f 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -4,6 +4,9 @@ import functools import logging import platform import copy + +from openpype import AYON_SERVER_ENABLED + from .exceptions import ( SaveWarningExc ) @@ -18,6 +21,11 @@ from .constants import ( DEFAULT_PROJECT_KEY ) +from .ayon_settings import ( + get_ayon_project_settings, + get_ayon_system_settings +) + log = logging.getLogger(__name__) # Py2 + Py3 json decode exception @@ -40,36 +48,17 @@ _SETTINGS_HANDLER = None _LOCAL_SETTINGS_HANDLER = None -def require_handler(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - global _SETTINGS_HANDLER - if _SETTINGS_HANDLER is None: - _SETTINGS_HANDLER = create_settings_handler() - return func(*args, **kwargs) - return wrapper - - -def require_local_handler(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - global _LOCAL_SETTINGS_HANDLER - if _LOCAL_SETTINGS_HANDLER is None: - _LOCAL_SETTINGS_HANDLER = create_local_settings_handler() - return func(*args, **kwargs) - return 
wrapper - - -def create_settings_handler(): - from .handlers import MongoSettingsHandler - # Handler can't be created in global space on initialization but only when - # needed. Plus here may be logic: Which handler is used (in future). - return MongoSettingsHandler() - - -def create_local_settings_handler(): - from .handlers import MongoLocalSettingsHandler - return MongoLocalSettingsHandler() +def clear_metadata_from_settings(values): + """Remove all metadata keys from loaded settings.""" + if isinstance(values, dict): + for key in tuple(values.keys()): + if key in METADATA_KEYS: + values.pop(key) + else: + clear_metadata_from_settings(values[key]) + elif isinstance(values, list): + for item in values: + clear_metadata_from_settings(item) def calculate_changes(old_value, new_value): @@ -91,6 +80,42 @@ def calculate_changes(old_value, new_value): return changes +def create_settings_handler(): + if AYON_SERVER_ENABLED: + raise RuntimeError("Mongo settings handler was triggered in AYON mode") + from .handlers import MongoSettingsHandler + # Handler can't be created in global space on initialization but only when + # needed. There may also be logic deciding which handler is used (in future). + return MongoSettingsHandler() + + +def create_local_settings_handler(): + if AYON_SERVER_ENABLED: + raise RuntimeError("Mongo settings handler was triggered in AYON mode") + from .handlers import MongoLocalSettingsHandler + return MongoLocalSettingsHandler() + + +def require_handler(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + global _SETTINGS_HANDLER + if _SETTINGS_HANDLER is None: + _SETTINGS_HANDLER = create_settings_handler() + return func(*args, **kwargs) + return wrapper + + +def require_local_handler(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + global _LOCAL_SETTINGS_HANDLER + if _LOCAL_SETTINGS_HANDLER is None: + _LOCAL_SETTINGS_HANDLER = create_local_settings_handler() + return func(*args, **kwargs) + return wrapper + + @require_handler def get_system_last_saved_info(): return _SETTINGS_HANDLER.get_system_last_saved_info() @@ -494,10 +519,17 @@ def save_local_settings(data): @require_local_handler -def get_local_settings(): +def _get_local_settings(): return _LOCAL_SETTINGS_HANDLER.get_local_settings() +def get_local_settings(): + if not AYON_SERVER_ENABLED: + return _get_local_settings() + # TODO implement an AYON variant + return {} + + def load_openpype_default_settings(): """Load openpype default settings.""" return load_jsons_from_dir(DEFAULTS_DIR) @@ -890,7 +922,7 @@ def apply_local_settings_on_project_settings( sync_server_config["remote_site"] = remote_site -def get_system_settings(clear_metadata=True, exclude_locals=None): +def _get_system_settings(clear_metadata=True, exclude_locals=None): """System settings with applied studio overrides.""" default_values = get_default_settings()[SYSTEM_SETTINGS_KEY] studio_values = get_studio_system_settings_overrides() @@ -992,7 +1024,7 @@ def get_anatomy_settings( return result -def get_project_settings( +def _get_project_settings( project_name, clear_metadata=True, exclude_locals=None ): """Project settings with applied studio and project overrides.""" @@ -1043,7 +1075,7 @@ def get_current_project_settings(): @require_handler -def get_global_settings(): +def _get_global_settings(): default_settings = load_openpype_default_settings() default_values = default_settings["system_settings"]["general"] studio_values = _SETTINGS_HANDLER.get_global_settings() @@ -1053,7 +1085,14 @@ def get_global_settings(): } -def
diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py
index e58e02f89a..1ec695b915 100644
--- a/openpype/tools/loader/model.py
+++ b/openpype/tools/loader/model.py
@@ -1173,9 +1173,9 @@ class RepresentationModel(TreeModel, BaseRepresentationModel):
                 repre_groups_items[doc["name"]] = 0
                 group = group_item
 
-            progress = lib.get_progress_for_repre(
-                doc, self.active_site, self.remote_site
-            )
+            progress = self.sync_server.get_progress_for_repre(
+                doc,
+                self.active_site, self.remote_site)
 
             active_site_icon = self._icons.get(self.active_provider)
             remote_site_icon = self._icons.get(self.remote_provider)
diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py
index b3aa381d14..5dd3af08d6 100644
--- a/openpype/tools/loader/widgets.py
+++ b/openpype/tools/loader/widgets.py
@@ -886,7 +886,9 @@ class ThumbnailWidget(QtWidgets.QLabel):
             self.set_pixmap()
             return
 
-        thumbnail_ent = get_thumbnail(project_name, thumbnail_id)
+        thumbnail_ent = get_thumbnail(
+            project_name, thumbnail_id, src_type, src_id
+        )
         if not thumbnail_ent:
             return
 
diff --git a/openpype/tools/sceneinventory/lib.py b/openpype/tools/sceneinventory/lib.py
index 5db3c479c5..4b1860342a 100644
--- a/openpype/tools/sceneinventory/lib.py
+++ b/openpype/tools/sceneinventory/lib.py
@@ -28,55 +28,3 @@ def get_site_icons():
 
     return icons
-
-
-def get_progress_for_repre(repre_doc, active_site, remote_site):
-    """
-    Calculates average progress for representation.
-
-    If site has created_dt >> fully available >> progress == 1
-
-    Could be calculated in aggregate if it would be too slow
-    Args:
-        repre_doc(dict): representation dict
-    Returns:
-        (dict) with active and remote sites progress
-        {'studio': 1.0, 'gdrive': -1} - gdrive site is not present
-            -1 is used to highlight the site should be added
-        {'studio': 1.0, 'gdrive': 0.0} - gdrive site is present, not
-            uploaded yet
-    """
-    progress = {active_site: -1, remote_site: -1}
-    if not repre_doc:
-        return progress
-
-    files = {active_site: 0, remote_site: 0}
-    doc_files = repre_doc.get("files") or []
-    for doc_file in doc_files:
-        if not isinstance(doc_file, dict):
-            continue
-
-        sites = doc_file.get("sites") or []
-        for site in sites:
-            if (
-                # Pype 2 compatibility
-                not isinstance(site, dict)
-                # Check if site name is one of progress sites
-                or site["name"] not in progress
-            ):
-                continue
-
-            files[site["name"]] += 1
-            norm_progress = max(progress[site["name"]], 0)
-            if site.get("created_dt"):
-                progress[site["name"]] = norm_progress + 1
-            elif site.get("progress"):
-                progress[site["name"]] = norm_progress + site["progress"]
-            else:  # site exists, might be failed, do not add again
-                progress[site["name"]] = 0
-
-    # for example 13 fully avail. files out of 26 >> 13/26 = 0.5
-    avg_progress = {
-        active_site: progress[active_site] / max(files[active_site], 1),
-        remote_site: progress[remote_site] / max(files[remote_site], 1)
-    }
-    return avg_progress
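The get_progress_for_repre helper deleted above (its twin is deleted from openpype/tools/utils/lib.py further below) now lives behind sync_server.get_progress_for_repre, and call sites keep relying on its contract: -1 means the site is attached to no file, 0 means present but failed or not yet transferred, and values in between are the per-file average. A compact restatement of the removed logic with a made-up representation document, handy for sanity-checking the sync server implementation against the old behaviour:

    def progress_for_repre(repre_doc, active_site, remote_site):
        # Same contract as the removed helpers: -1 = site missing,
        # 0..1 = averaged per-file availability on that site.
        progress = {active_site: -1, remote_site: -1}
        files = {active_site: 0, remote_site: 0}
        for doc_file in (repre_doc or {}).get("files") or []:
            if not isinstance(doc_file, dict):
                continue
            for site in doc_file.get("sites") or []:
                if not isinstance(site, dict) or site["name"] not in progress:
                    continue
                name = site["name"]
                files[name] += 1
                base = max(progress[name], 0)
                if site.get("created_dt"):
                    progress[name] = base + 1
                elif site.get("progress"):
                    progress[name] = base + site["progress"]
                else:
                    # Site record exists but nothing transferred (or failed).
                    progress[name] = 0
        return {
            site: progress[site] / max(files[site], 1)
            for site in (active_site, remote_site)
        }

    # Hypothetical document: both files fully on "studio", "gdrive" present
    # on one file but not uploaded yet.
    doc = {"files": [
        {"sites": [
            {"name": "studio", "created_dt": "2023-01-01"},
            {"name": "gdrive"},
        ]},
        {"sites": [{"name": "studio", "created_dt": "2023-01-02"}]},
    ]}
    assert progress_for_repre(doc, "studio", "gdrive") == {
        "studio": 1.0, "gdrive": 0.0
    }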
diff --git a/openpype/tools/sceneinventory/model.py b/openpype/tools/sceneinventory/model.py
index 5cc849bb9e..815ecf9efe 100644
--- a/openpype/tools/sceneinventory/model.py
+++ b/openpype/tools/sceneinventory/model.py
@@ -27,7 +27,6 @@ from openpype.modules import ModulesManager
 from .lib import (
     get_site_icons,
     walk_hierarchy,
-    get_progress_for_repre
 )
 
 
@@ -80,7 +79,7 @@ class InventoryModel(TreeModel):
                 project_name, remote_site
             )
 
-            # self.sync_server = sync_server
+            self.sync_server = sync_server
             self.active_site = active_site
             self.active_provider = active_provider
             self.remote_site = remote_site
@@ -445,7 +444,7 @@ class InventoryModel(TreeModel):
             group_node["group"] = subset["data"].get("subsetGroup")
 
             if self.sync_enabled:
-                progress = get_progress_for_repre(
+                progress = self.sync_server.get_progress_for_repre(
                     representation, self.active_site, self.remote_site
                 )
                 group_node["active_site"] = self.active_site
diff --git a/openpype/tools/sceneinventory/view.py b/openpype/tools/sceneinventory/view.py
index 57e6e24411..d22b2bdd0f 100644
--- a/openpype/tools/sceneinventory/view.py
+++ b/openpype/tools/sceneinventory/view.py
@@ -23,7 +23,6 @@ from openpype.pipeline import (
 )
 from openpype.modules import ModulesManager
 from openpype.tools.utils.lib import (
-    get_progress_for_repre,
     iter_model_rows,
     format_version
 )
@@ -361,7 +360,7 @@ class SceneInventoryView(QtWidgets.QTreeView):
             if not repre_doc:
                 continue
 
-            progress = get_progress_for_repre(
+            progress = self.sync_server.get_progress_for_repre(
                 repre_doc,
                 active_site,
                 remote_site
diff --git a/openpype/tools/tray/pype_info_widget.py b/openpype/tools/tray/pype_info_widget.py
index c616ad4dba..dc222b79b5 100644
--- a/openpype/tools/tray/pype_info_widget.py
+++ b/openpype/tools/tray/pype_info_widget.py
@@ -2,11 +2,14 @@ import os
 import json
 import collections
 
+import ayon_api
 from qtpy import QtCore, QtGui, QtWidgets
 
 from openpype import style
 from openpype import resources
+from openpype import AYON_SERVER_ENABLED
 from openpype.settings.lib import get_local_settings
+from openpype.lib import get_openpype_execute_args
 from openpype.lib.pype_info import (
     get_all_current_info,
     get_openpype_info,
@@ -327,8 +330,9 @@ class PypeInfoSubWidget(QtWidgets.QWidget):
         main_layout.addWidget(self._create_openpype_info_widget(), 0)
         main_layout.addWidget(self._create_separator(), 0)
         main_layout.addWidget(self._create_workstation_widget(), 0)
-        main_layout.addWidget(self._create_separator(), 0)
-        main_layout.addWidget(self._create_local_settings_widget(), 0)
+        if not AYON_SERVER_ENABLED:
+            main_layout.addWidget(self._create_separator(), 0)
+            main_layout.addWidget(self._create_local_settings_widget(), 0)
         main_layout.addWidget(self._create_separator(), 0)
         main_layout.addWidget(self._create_environ_widget(), 1)
 
@@ -425,31 +429,59 @@ class PypeInfoSubWidget(QtWidgets.QWidget):
 
     def _create_openpype_info_widget(self):
         """Create widget with information about OpenPype application."""
-        # Get pype info data
-        pype_info = get_openpype_info()
-        # Modify version key/values
-        version_value = "{} ({})".format(
-            pype_info.pop("version", self.not_applicable),
-            pype_info.pop("version_type", self.not_applicable)
-        )
-        pype_info["version_value"] = version_value
-        # Prepare label mapping
-        key_label_mapping = {
-            "version_value": "Running version:",
-            "build_verison": "Build version:",
-            "executable": "OpenPype executable:",
-            "pype_root": "OpenPype location:",
-            "mongo_url": "OpenPype Mongo URL:"
-        }
-        # Prepare keys order
-        keys_order = [
-            "version_value",
-            "build_verison",
-            "executable",
-            "pype_root",
-            "mongo_url"
-        ]
-        for key in pype_info.keys():
+        if AYON_SERVER_ENABLED:
+            executable_args = get_openpype_execute_args()
+            username = "N/A"
+            user_info = ayon_api.get_user()
+            if user_info:
+                username = user_info.get("name") or username
+                full_name = user_info.get("attrib", {}).get("fullName")
+                if full_name:
+                    username = "{} ({})".format(full_name, username)
+            info_values = {
+                "executable": executable_args[-1],
+                "server_url": os.environ["AYON_SERVER_URL"],
+                "username": username
+            }
+            key_label_mapping = {
+                "executable": "AYON Executable:",
+                "server_url": "AYON Server:",
+                "username": "AYON Username:"
+            }
+            # Prepare keys order
+            keys_order = [
+                "server_url",
+                "username",
+                "executable",
+            ]
+
+        else:
+            # Get pype info data
+            info_values = get_openpype_info()
+            # Modify version key/values
+            version_value = "{} ({})".format(
+                info_values.pop("version", self.not_applicable),
+                info_values.pop("version_type", self.not_applicable)
+            )
+            info_values["version_value"] = version_value
+            # Prepare label mapping
+            key_label_mapping = {
+                "version_value": "Running version:",
+                "build_verison": "Build version:",
+                "executable": "OpenPype executable:",
+                "pype_root": "OpenPype location:",
+                "mongo_url": "OpenPype Mongo URL:"
+            }
+            # Prepare keys order
+            keys_order = [
+                "version_value",
+                "build_verison",
+                "executable",
+                "pype_root",
+                "mongo_url"
+            ]
+
+        for key in info_values.keys():
             if key not in keys_order:
                 keys_order.append(key)
 
@@ -466,9 +498,9 @@ class PypeInfoSubWidget(QtWidgets.QWidget):
         info_layout.addWidget(title_label, 0, 0, 1, 2)
 
         for key in keys_order:
-            if key not in pype_info:
+            if key not in info_values:
                 continue
-            value = pype_info[key]
+            value = info_values[key]
             label = key_label_mapping.get(key, key)
             row = info_layout.rowCount()
             info_layout.addWidget(
diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py
index fdc0a8094d..1cf128e59d 100644
--- a/openpype/tools/tray/pype_tray.py
+++ b/openpype/tools/tray/pype_tray.py
@@ -8,6 +8,7 @@ import platform
 from qtpy import QtCore, QtGui, QtWidgets
 
 import openpype.version
+from openpype import AYON_SERVER_ENABLED
 from openpype import resources, style
 from openpype.lib import (
     Logger,
@@ -589,6 +590,11 @@ class TrayManager:
         self.tray_widget.showMessage(*args, **kwargs)
 
     def _add_version_item(self):
+        if AYON_SERVER_ENABLED:
+            login_action = QtWidgets.QAction("Login", self.tray_widget)
+            login_action.triggered.connect(self._on_ayon_login)
+            self.tray_widget.menu.addAction(login_action)
+
         subversion = os.environ.get("OPENPYPE_SUBVERSION")
         client_name = os.environ.get("OPENPYPE_CLIENT")
 
@@ -614,6 +620,19 @@ class TrayManager:
 
         self._restart_action = restart_action
 
+    def _on_ayon_login(self):
+        self.execute_in_main_thread(self._show_ayon_login)
+
+    def _show_ayon_login(self):
+        from ayon_common.connection.credentials import change_user_ui
+
+        result = change_user_ui()
+        if result.shutdown:
+            self.exit()
+
+        elif result.restart or result.token_changed:
+            self.restart()
+
     def _on_restart_action(self):
         self.restart(use_expected_version=True)
 
diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py
index 58ece7c68f..7b3faddf08 100644
--- a/openpype/tools/utils/lib.py
+++ b/openpype/tools/utils/lib.py
@@ -752,61 +752,6 @@ def get_repre_icons():
 
     return icons
 
 
-def get_progress_for_repre(doc, active_site, remote_site):
-    """
-    Calculates average progress for representation.
-
-    If site has created_dt >> fully available >> progress == 1
-
-    Could be calculated in aggregate if it would be too slow
-    Args:
-        doc(dict): representation dict
-    Returns:
-        (dict) with active and remote sites progress
-        {'studio': 1.0, 'gdrive': -1} - gdrive site is not present
-            -1 is used to highlight the site should be added
-        {'studio': 1.0, 'gdrive': 0.0} - gdrive site is present, not
-            uploaded yet
-    """
-    progress = {active_site: -1,
-                remote_site: -1}
-    if not doc:
-        return progress
-
-    files = {active_site: 0, remote_site: 0}
-    doc_files = doc.get("files") or []
-    for doc_file in doc_files:
-        if not isinstance(doc_file, dict):
-            continue
-
-        sites = doc_file.get("sites") or []
-        for site in sites:
-            if (
-                # Pype 2 compatibility
-                not isinstance(site, dict)
-                # Check if site name is one of progress sites
-                or site["name"] not in progress
-            ):
-                continue
-
-            files[site["name"]] += 1
-            norm_progress = max(progress[site["name"]], 0)
-            if site.get("created_dt"):
-                progress[site["name"]] = norm_progress + 1
-            elif site.get("progress"):
-                progress[site["name"]] = norm_progress + site["progress"]
-            else:  # site exists, might be failed, do not add again
-                progress[site["name"]] = 0
-
-    # for example 13 fully avail. files out of 26 >> 13/26 = 0.5
-    avg_progress = {}
-    avg_progress[active_site] = \
-        progress[active_site] / max(files[active_site], 1)
-    avg_progress[remote_site] = \
-        progress[remote_site] / max(files[remote_site], 1)
-    return avg_progress
-
-
 def is_sync_loader(loader):
     return is_remove_site_loader(loader) or is_add_site_loader(loader)
diff --git a/poetry.lock b/poetry.lock
index d7bdc5f7c4..f915832fb8 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -302,6 +302,24 @@ files = [
 pycodestyle = ">=2.10.0"
 tomli = {version = "*", markers = "python_version < \"3.11\""}
 
+[[package]]
+name = "ayon-python-api"
+version = "0.1.16"
+description = "AYON Python API"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "ayon-python-api-0.1.16.tar.gz", hash = "sha256:666110954dd75b2be1699a29b4732cfb0bcb09d01f64fba4449bfc8ac1fb43f1"},
+    {file = "ayon_python_api-0.1.16-py3-none-any.whl", hash = "sha256:bbcd6df1f80ddf32e653a1bb31289cb5fd1a8bea36ab4c8e6aef08c41b6393de"},
+]
+
+[package.dependencies]
+appdirs = ">=1,<2"
+requests = ">=2.27.1"
+six = ">=1.15"
+Unidecode = ">=1.2.0"
+
 [[package]]
 name = "babel"
 version = "2.11.0"
@@ -3371,14 +3389,14 @@ test = ["coverage", "pytest", "pytest-cov"]
 
 [[package]]
 name = "unidecode"
-version = "1.3.6"
+version = "1.2.0"
 description = "ASCII transliterations of Unicode text"
-category = "dev"
-optional = true
-python-versions = ">=3.5"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 files = [
-    {file = "Unidecode-1.3.6-py3-none-any.whl", hash = "sha256:547d7c479e4f377b430dd91ac1275d593308dce0fc464fb2ab7d41f82ec653be"},
-    {file = "Unidecode-1.3.6.tar.gz", hash = "sha256:fed09cf0be8cf415b391642c2a5addfc72194407caee4f98719e40ec2a72b830"},
+    {file = "Unidecode-1.2.0-py2.py3-none-any.whl", hash = "sha256:12435ef2fc4cdfd9cf1035a1db7e98b6b047fe591892e81f34e94959591fad00"},
+    {file = "Unidecode-1.2.0.tar.gz", hash = "sha256:8d73a97d387a956922344f6b74243c2c6771594659778744b2dbdaad8f6b727d"},
 ]
 
 [[package]]
@@ -3672,10 +3690,7 @@ files = [
 docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
 testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
 
-[extras]
-docs = []
-
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9.1,<3.10"
-content-hash = "47518c544a90cdb3e99e83533557515d0d47079ac4461708ce71ab3ce97b9987"
+content-hash = "6bdb0572a9e255898497ad5ec4d7368d4e0850ce9f4d5c72a37394a2f8f7ec06"
diff --git a/pyproject.toml b/pyproject.toml
index fe9c228ea9..ebd7ea127d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -70,7 +70,9 @@ requests = "^2.25.1"
 pysftp = "^0.2.9"
 dropbox = "^11.20.0"
 aiohttp-middlewares = "^2.0.0"
+ayon-python-api = "^0.1"
 opencolorio = "^2.2.0"
+Unidecode = "^1.2"
 
 [tool.poetry.dev-dependencies]
 flake8 = "^6.0"
diff --git a/setup.py b/setup.py
index ab6e22bccc..f915f0e8ae 100644
--- a/setup.py
+++ b/setup.py
@@ -126,6 +126,7 @@ bin_includes = [
 include_files = [
     "igniter",
     "openpype",
+    "common",
     "schema",
     "LICENSE",
     "README.md"
@@ -158,11 +159,35 @@ bdist_mac_options = dict(
 )
 
 executables = [
-    Executable("start.py", base=base,
-               target_name="openpype_gui", icon=icon_path.as_posix()),
-    Executable("start.py", base=None,
-               target_name="openpype_console", icon=icon_path.as_posix())
+    Executable(
+        "start.py",
+        base=base,
+        target_name="openpype_gui",
+        icon=icon_path.as_posix()
+    ),
+    Executable(
+        "start.py",
+        base=None,
+        target_name="openpype_console",
+        icon=icon_path.as_posix()
+    ),
+    Executable(
+        "ayon_start.py",
+        base=base,
+        target_name="ayon",
+        icon=icon_path.as_posix()
+    ),
 ]
+if IS_WINDOWS:
+    executables.append(
+        Executable(
+            "ayon_start.py",
+            base=None,
+            target_name="ayon_console",
+            icon=icon_path.as_posix()
+        )
+    )
+
 if IS_LINUX:
     executables.append(
         Executable(
target_name="openpype_gui", + icon=icon_path.as_posix() + ), + Executable( + "start.py", + base=None, + target_name="openpype_console", + icon=icon_path.as_posix() + ), + Executable( + "ayon_start.py", + base=base, + target_name="ayon", + icon=icon_path.as_posix() + ), ] +if IS_WINDOWS: + executables.append( + Executable( + "ayon_start.py", + base=None, + target_name="ayon_console", + icon=icon_path.as_posix() + ) + ) + if IS_LINUX: executables.append( Executable( diff --git a/start.py b/start.py index 36e2540200..f8d65dc221 100644 --- a/start.py +++ b/start.py @@ -133,6 +133,10 @@ else: vendor_python_path = os.path.join(OPENPYPE_ROOT, "vendor", "python") sys.path.insert(0, vendor_python_path) +# Add common package to sys path +# - common contains common code for bootstraping and OpenPype processes +sys.path.insert(0, os.path.join(OPENPYPE_ROOT, "common")) + import blessed # noqa: E402 import certifi # noqa: E402 diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 300024dc98..f04607dc27 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -12,7 +12,7 @@ import requests import re from tests.lib.db_handler import DBHandler -from common.openpype_common.distribution.file_handler import RemoteFileHandler +from common.ayon_common.distribution.file_handler import RemoteFileHandler from openpype.modules import ModulesManager from openpype.settings import get_project_settings diff --git a/tools/run_tray_ayon.ps1 b/tools/run_tray_ayon.ps1 new file mode 100644 index 0000000000..c0651bdbbe --- /dev/null +++ b/tools/run_tray_ayon.ps1 @@ -0,0 +1,41 @@ +<# +.SYNOPSIS + Helper script OpenPype Tray. + +.DESCRIPTION + + +.EXAMPLE + +PS> .\run_tray.ps1 + +#> +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$openpype_root = (Get-Item $script_dir).parent.FullName + +# Install PSWriteColor to support colorized output to terminal +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\tools\modules\powershell" + +$env:_INSIDE_OPENPYPE_TOOL = "1" + +# make sure Poetry is in PATH +if (-not (Test-Path 'env:POETRY_HOME')) { + $env:POETRY_HOME = "$openpype_root\.poetry" +} +$env:PATH = "$($env:PATH);$($env:POETRY_HOME)\bin" + + +Set-Location -Path $openpype_root + +Write-Color -Text ">>> ", "Reading Poetry ... " -Color Green, Gray -NoNewline +if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { + Write-Color -Text "NOT FOUND" -Color Yellow + Write-Color -Text "*** ", "We need to install Poetry create virtual env first ..." -Color Yellow, Gray + & "$openpype_root\tools\create_env.ps1" +} else { + Write-Color -Text "OK" -Color Green +} + +& "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\ayon_start.py" tray --debug +Set-Location -Path $current_dir