Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-02 00:44:52 +01:00)

Merge branch 'develop' into bugfix/houdini_creator_settings

Commit fdd8b8624b
225 changed files with 10753 additions and 5518 deletions

.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 6 changes)
@@ -35,6 +35,9 @@ body:
      label: Version
      description: What version are you running? Look to OpenPype Tray
      options:
        - 3.16.3-nightly.4
        - 3.16.3-nightly.3
        - 3.16.3-nightly.2
        - 3.16.3-nightly.1
        - 3.16.2
        - 3.16.2-nightly.2
@@ -132,9 +135,6 @@ body:
        - 3.14.7-nightly.5
        - 3.14.7-nightly.4
        - 3.14.7-nightly.3
        - 3.14.7-nightly.2
        - 3.14.7-nightly.1
        - 3.14.6
    validations:
      required: true
  - type: dropdown
ayon_start.py (deleted, 483 lines)

@@ -1,483 +0,0 @@
# -*- coding: utf-8 -*-
"""Main entry point for AYON command.

Bootstrapping process of AYON.
"""
import os
import sys
import site
import traceback
import contextlib


# Enable verbose logging when "--verbose" is passed
if "--verbose" in sys.argv:
    expected_values = (
        "Expected: notset, debug, info, warning, error, critical"
        " or integer [0-50]."
    )
    idx = sys.argv.index("--verbose")
    sys.argv.pop(idx)
    if idx < len(sys.argv):
        value = sys.argv.pop(idx)
    else:
        raise RuntimeError((
            f"Expect value after \"--verbose\" argument. {expected_values}"
        ))

    log_level = None
    low_value = value.lower()
    if low_value.isdigit():
        log_level = int(low_value)
    elif low_value == "notset":
        log_level = 0
    elif low_value == "debug":
        log_level = 10
    elif low_value == "info":
        log_level = 20
    elif low_value == "warning":
        log_level = 30
    elif low_value == "error":
        log_level = 40
    elif low_value == "critical":
        log_level = 50

    if log_level is None:
        raise ValueError((
            "Unexpected value after \"--verbose\" "
            f"argument \"{value}\". {expected_values}"
        ))

    os.environ["OPENPYPE_LOG_LEVEL"] = str(log_level)
    os.environ["AYON_LOG_LEVEL"] = str(log_level)

# Enable debug mode, may affect log level if log level is not defined
if "--debug" in sys.argv:
    sys.argv.remove("--debug")
    os.environ["AYON_DEBUG"] = "1"
    os.environ["OPENPYPE_DEBUG"] = "1"

if "--automatic-tests" in sys.argv:
    sys.argv.remove("--automatic-tests")
    os.environ["IS_TEST"] = "1"

SKIP_HEADERS = False
if "--skip-headers" in sys.argv:
    sys.argv.remove("--skip-headers")
    SKIP_HEADERS = True

SKIP_BOOTSTRAP = False
if "--skip-bootstrap" in sys.argv:
    sys.argv.remove("--skip-bootstrap")
    SKIP_BOOTSTRAP = True

if "--use-staging" in sys.argv:
    sys.argv.remove("--use-staging")
    os.environ["AYON_USE_STAGING"] = "1"
    os.environ["OPENPYPE_USE_STAGING"] = "1"

if "--headless" in sys.argv:
    os.environ["AYON_HEADLESS_MODE"] = "1"
    os.environ["OPENPYPE_HEADLESS_MODE"] = "1"
    sys.argv.remove("--headless")

elif (
    os.getenv("AYON_HEADLESS_MODE") != "1"
    or os.getenv("OPENPYPE_HEADLESS_MODE") != "1"
):
    os.environ.pop("AYON_HEADLESS_MODE", None)
    os.environ.pop("OPENPYPE_HEADLESS_MODE", None)

elif (
    os.getenv("AYON_HEADLESS_MODE")
    != os.getenv("OPENPYPE_HEADLESS_MODE")
):
    os.environ["OPENPYPE_HEADLESS_MODE"] = (
        os.environ["AYON_HEADLESS_MODE"]
    )

IS_BUILT_APPLICATION = getattr(sys, "frozen", False)
HEADLESS_MODE_ENABLED = os.getenv("AYON_HEADLESS_MODE") == "1"

_pythonpath = os.getenv("PYTHONPATH", "")
_python_paths = _pythonpath.split(os.pathsep)
if not IS_BUILT_APPLICATION:
    # Code root defined by `start.py` directory
    AYON_ROOT = os.path.dirname(os.path.abspath(__file__))
    _dependencies_path = site.getsitepackages()[-1]
else:
    AYON_ROOT = os.path.dirname(sys.executable)

    # add dependencies folder to sys.path for frozen code
    _dependencies_path = os.path.normpath(
        os.path.join(AYON_ROOT, "dependencies")
    )
# add stuff from `<frozen>/dependencies` to PYTHONPATH.
sys.path.append(_dependencies_path)
_python_paths.append(_dependencies_path)

# Vendored python modules that must not be in PYTHONPATH environment but
# are required for OpenPype processes
sys.path.insert(0, os.path.join(AYON_ROOT, "vendor", "python"))

# Add common package to sys path
# - common contains common code for bootstrapping and OpenPype processes
sys.path.insert(0, os.path.join(AYON_ROOT, "common"))

# This is content of 'core' addon which is ATM part of build
common_python_vendor = os.path.join(
    AYON_ROOT,
    "openpype",
    "vendor",
    "python",
    "common"
)
# Add tools dir to sys path for pyblish UI discovery
tools_dir = os.path.join(AYON_ROOT, "openpype", "tools")
for path in (AYON_ROOT, common_python_vendor, tools_dir):
    while path in _python_paths:
        _python_paths.remove(path)

    while path in sys.path:
        sys.path.remove(path)

    _python_paths.insert(0, path)
    sys.path.insert(0, path)

os.environ["PYTHONPATH"] = os.pathsep.join(_python_paths)

# enabled AYON state
os.environ["USE_AYON_SERVER"] = "1"
# Set this to point either to `python` from venv in case of live code
# or to `ayon` or `ayon_console` in case of frozen code
os.environ["AYON_EXECUTABLE"] = sys.executable
os.environ["OPENPYPE_EXECUTABLE"] = sys.executable
os.environ["AYON_ROOT"] = AYON_ROOT
os.environ["OPENPYPE_ROOT"] = AYON_ROOT
os.environ["OPENPYPE_REPOS_ROOT"] = AYON_ROOT
os.environ["AYON_MENU_LABEL"] = "AYON"
os.environ["AVALON_LABEL"] = "AYON"
# Set name of pyblish UI import
os.environ["PYBLISH_GUI"] = "pyblish_pype"
# Set builtin OCIO root
os.environ["BUILTIN_OCIO_ROOT"] = os.path.join(
    AYON_ROOT,
    "vendor",
    "bin",
    "ocioconfig",
    "OpenColorIOConfigs"
)

import blessed  # noqa: E402
import certifi  # noqa: E402


if sys.__stdout__:
    term = blessed.Terminal()

    def _print(message: str):
        if message.startswith("!!! "):
            print(f'{term.orangered2("!!! ")}{message[4:]}')
        elif message.startswith(">>> "):
            print(f'{term.aquamarine3(">>> ")}{message[4:]}')
        elif message.startswith("--- "):
            print(f'{term.darkolivegreen3("--- ")}{message[4:]}')
        elif message.startswith("*** "):
            print(f'{term.gold("*** ")}{message[4:]}')
        elif message.startswith("  - "):
            print(f'{term.wheat("  - ")}{message[4:]}')
        elif message.startswith("  . "):
            print(f'{term.tan("  . ")}{message[4:]}')
        elif message.startswith("     - "):
            print(f'{term.seagreen3("     - ")}{message[7:]}')
        elif message.startswith("     ! "):
            print(f'{term.goldenrod("     ! ")}{message[7:]}')
        elif message.startswith("     * "):
            print(f'{term.aquamarine1("     * ")}{message[7:]}')
        elif message.startswith("    "):
            print(f'{term.darkseagreen3("    ")}{message[4:]}')
        else:
            print(message)
else:
    def _print(message: str):
        print(message)


# if SSL_CERT_FILE is not set prior to OpenPype launch, we set it to point
# to certifi bundle to make sure we have reasonably new CA certificates.
if not os.getenv("SSL_CERT_FILE"):
    os.environ["SSL_CERT_FILE"] = certifi.where()
elif os.getenv("SSL_CERT_FILE") != certifi.where():
    _print("--- your system is set to use custom CA certificate bundle.")

from ayon_api import get_base_url
from ayon_api.constants import SERVER_URL_ENV_KEY, SERVER_API_ENV_KEY
from ayon_common import is_staging_enabled
from ayon_common.connection.credentials import (
    ask_to_login_ui,
    add_server,
    need_server_or_login,
    load_environments,
    set_environments,
    create_global_connection,
    confirm_server_login,
)
from ayon_common.distribution import (
    AyonDistribution,
    BundleNotFoundError,
    show_missing_bundle_information,
)


def set_global_environments() -> None:
    """Set global OpenPype's environments."""
    import acre

    from openpype.settings import get_general_environments

    general_env = get_general_environments()

    # first resolve general environment because merge doesn't expect
    # values to be list.
    # TODO: switch to OpenPype environment functions
    merged_env = acre.merge(
        acre.compute(acre.parse(general_env), cleanup=False),
        dict(os.environ)
    )
    env = acre.compute(
        merged_env,
        cleanup=False
    )
    os.environ.clear()
    os.environ.update(env)

    # Hardcoded default values
    os.environ["PYBLISH_GUI"] = "pyblish_pype"
    # Change scale factor only if is not set
    if "QT_AUTO_SCREEN_SCALE_FACTOR" not in os.environ:
        os.environ["QT_AUTO_SCREEN_SCALE_FACTOR"] = "1"


def set_addons_environments():
    """Set global environments for OpenPype modules.

    This requires OpenPype to be in `sys.path`.
    """

    import acre
    from openpype.modules import ModulesManager

    modules_manager = ModulesManager()

    # Merge environments with current environments and update values
    if module_envs := modules_manager.collect_global_environments():
        parsed_envs = acre.parse(module_envs)
        env = acre.merge(parsed_envs, dict(os.environ))
        os.environ.clear()
        os.environ.update(env)


def _connect_to_ayon_server():
    load_environments()
    if not need_server_or_login():
        create_global_connection()
        return

    if HEADLESS_MODE_ENABLED:
        _print("!!! Cannot open v4 Login dialog in headless mode.")
        _print((
            "!!! Please use `{}` to specify server address"
            " and '{}' to specify user's token."
        ).format(SERVER_URL_ENV_KEY, SERVER_API_ENV_KEY))
        sys.exit(1)

    current_url = os.environ.get(SERVER_URL_ENV_KEY)
    url, token, username = ask_to_login_ui(current_url, always_on_top=True)
    if url is not None and token is not None:
        confirm_server_login(url, token, username)
        return

    if url is not None:
        add_server(url, username)

    _print("!!! Login was not successful.")
    sys.exit(0)


def _check_and_update_from_ayon_server():
    """Gets addon info from v4, compares with local folder and updates it.

    Raises:
        RuntimeError
    """

    distribution = AyonDistribution()
    bundle = None
    bundle_name = None
    try:
        bundle = distribution.bundle_to_use
        if bundle is not None:
            bundle_name = bundle.name
    except BundleNotFoundError as exc:
        bundle_name = exc.bundle_name

    if bundle is None:
        url = get_base_url()
        if not HEADLESS_MODE_ENABLED:
            show_missing_bundle_information(url, bundle_name)

        elif bundle_name:
            _print((
                f"!!! Requested release bundle '{bundle_name}'"
                " is not available on server."
            ))
            _print(
                "!!! Check if selected release bundle"
                f" is available on the server '{url}'."
            )

        else:
            mode = "staging" if is_staging_enabled() else "production"
            _print(
                f"!!! No release bundle is set as {mode} on the AYON server."
            )
            _print(
                "!!! Make sure there is a release bundle set"
                f" as \"{mode}\" on the AYON server '{url}'."
            )
        sys.exit(1)

    distribution.distribute()
    distribution.validate_distribution()
    os.environ["AYON_BUNDLE_NAME"] = bundle_name

    python_paths = [
        path
        for path in os.getenv("PYTHONPATH", "").split(os.pathsep)
        if path
    ]

    for path in distribution.get_sys_paths():
        sys.path.insert(0, path)
        if path not in python_paths:
            python_paths.append(path)
    os.environ["PYTHONPATH"] = os.pathsep.join(python_paths)


def boot():
    """Bootstrap OpenPype."""

    from openpype.version import __version__

    # TODO load version
    os.environ["OPENPYPE_VERSION"] = __version__
    os.environ["AYON_VERSION"] = __version__

    _connect_to_ayon_server()
    _check_and_update_from_ayon_server()

    # delete OpenPype module and its submodules from cache so it is used from
    # specific version
    modules_to_del = [
        sys.modules.pop(module_name)
        for module_name in tuple(sys.modules)
        if module_name == "openpype" or module_name.startswith("openpype.")
    ]

    for module_name in modules_to_del:
        with contextlib.suppress(AttributeError, KeyError):
            del sys.modules[module_name]


def main_cli():
    from openpype import cli
    from openpype.version import __version__
    from openpype.lib import terminal as t

    _print(">>> loading environments ...")
    _print("  - global AYON ...")
    set_global_environments()
    _print("  - for addons ...")
    set_addons_environments()

    # print info when not running scripts defined in 'silent commands'
    if not SKIP_HEADERS:
        info = get_info(is_staging_enabled())
        info.insert(0, f">>> Using AYON from [ {AYON_ROOT} ]")

        t_width = 20
        with contextlib.suppress(ValueError, OSError):
            t_width = os.get_terminal_size().columns - 2

        _header = f"*** AYON [{__version__}] "
        info.insert(0, _header + "-" * (t_width - len(_header)))

        for i in info:
            t.echo(i)

    try:
        cli.main(obj={}, prog_name="ayon")
    except Exception:  # noqa
        exc_info = sys.exc_info()
        _print("!!! AYON crashed:")
        traceback.print_exception(*exc_info)
        sys.exit(1)


def script_cli():
    """Run and execute script."""

    filepath = os.path.abspath(sys.argv[1])

    # Find '__main__.py' in directory
    if os.path.isdir(filepath):
        new_filepath = os.path.join(filepath, "__main__.py")
        if not os.path.exists(new_filepath):
            raise RuntimeError(
                f"can't find '__main__' module in '{filepath}'")
        filepath = new_filepath

    # Add parent dir to sys path
    sys.path.insert(0, os.path.dirname(filepath))

    # Read content and execute
    with open(filepath, "r") as stream:
        content = stream.read()

    exec(compile(content, filepath, "exec"), globals())


def get_info(use_staging=None) -> list:
    """Print additional information to console."""

    inf = []
    if use_staging:
        inf.append(("AYON variant", "staging"))
    else:
        inf.append(("AYON variant", "production"))
    inf.append(("AYON bundle", os.getenv("AYON_BUNDLE")))

    # NOTE add addons information

    maximum = max(len(i[0]) for i in inf)
    formatted = []
    for info in inf:
        padding = (maximum - len(info[0])) + 1
        formatted.append(f'... {info[0]}:{" " * padding}[ {info[1]} ]')
    return formatted


def main():
    if not SKIP_BOOTSTRAP:
        boot()

    args = list(sys.argv)
    args.pop(0)
    if args and os.path.exists(args[0]):
        script_cli()
    else:
        main_cli()


if __name__ == "__main__":
    main()
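The "--verbose" handling above maps the named levels to the standard numeric logging levels. A minimal standalone sketch of that mapping (the helper name parse_verbose_value is illustrative, not part of the file above):

import logging

# Named levels accepted by "--verbose", matching the mapping above.
_NAMED_LEVELS = {
    "notset": logging.NOTSET,      # 0
    "debug": logging.DEBUG,        # 10
    "info": logging.INFO,          # 20
    "warning": logging.WARNING,    # 30
    "error": logging.ERROR,        # 40
    "critical": logging.CRITICAL,  # 50
}


def parse_verbose_value(value: str) -> int:
    low_value = value.lower()
    if low_value.isdigit():
        return int(low_value)
    if low_value in _NAMED_LEVELS:
        return _NAMED_LEVELS[low_value]
    raise ValueError(
        f'Unexpected value "{value}". Expected: notset, debug, info,'
        " warning, error, critical or integer [0-50]."
    )


# e.g. parse_verbose_value("debug") == 10, which the launcher stores in
# AYON_LOG_LEVEL / OPENPYPE_LOG_LEVEL.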
@@ -1,16 +0,0 @@
from .utils import (
    IS_BUILT_APPLICATION,
    is_staging_enabled,
    get_local_site_id,
    get_ayon_appdirs,
    get_ayon_launch_args,
)


__all__ = (
    "IS_BUILT_APPLICATION",
    "is_staging_enabled",
    "get_local_site_id",
    "get_ayon_appdirs",
    "get_ayon_launch_args",
)
@@ -1,511 +0,0 @@
|
|||
"""Handle credentials and connection to server for client application.
|
||||
|
||||
Cache and store used server urls. Store/load API keys to/from keyring if
|
||||
needed. Store metadata about used urls, usernames for the urls and when
the connection with the username was established.
|
||||
|
||||
On bootstrap a global connection is created with information about site and
|
||||
client version. The connection object lives in 'ayon_api'.
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import platform
|
||||
import datetime
|
||||
import contextlib
|
||||
import subprocess
|
||||
import tempfile
|
||||
from typing import Optional, Union, Any
|
||||
|
||||
import ayon_api
|
||||
|
||||
from ayon_api.constants import SERVER_URL_ENV_KEY, SERVER_API_ENV_KEY
|
||||
from ayon_api.exceptions import UrlError
|
||||
from ayon_api.utils import (
|
||||
validate_url,
|
||||
is_token_valid,
|
||||
logout_from_server,
|
||||
)
|
||||
|
||||
from ayon_common.utils import (
|
||||
get_ayon_appdirs,
|
||||
get_local_site_id,
|
||||
get_ayon_launch_args,
|
||||
is_staging_enabled,
|
||||
)
|
||||
|
||||
|
||||
class ChangeUserResult:
|
||||
def __init__(
|
||||
self, logged_out, old_url, old_token, old_username,
|
||||
new_url, new_token, new_username
|
||||
):
|
||||
shutdown = logged_out
|
||||
restart = new_url is not None and new_url != old_url
|
||||
token_changed = new_token is not None and new_token != old_token
|
||||
|
||||
self.logged_out = logged_out
|
||||
self.old_url = old_url
|
||||
self.old_token = old_token
|
||||
self.old_username = old_username
|
||||
self.new_url = new_url
|
||||
self.new_token = new_token
|
||||
self.new_username = new_username
|
||||
|
||||
self.shutdown = shutdown
|
||||
self.restart = restart
|
||||
self.token_changed = token_changed
|
||||
|
||||
|
||||
def _get_servers_path():
|
||||
return get_ayon_appdirs("used_servers.json")
|
||||
|
||||
|
||||
def get_servers_info_data():
|
||||
"""Metadata about used server on this machine.
|
||||
|
||||
Store data about all used server urls, last used url and user username for
|
||||
the url. Using this metadata we can remember which username was used per
|
||||
url if the token stored in keyring expires.
|
||||
|
||||
Returns:
|
||||
dict[str, Any]: Information about servers.
|
||||
"""
|
||||
|
||||
data = {}
|
||||
servers_info_path = _get_servers_path()
|
||||
if not os.path.exists(servers_info_path):
|
||||
dirpath = os.path.dirname(servers_info_path)
|
||||
if not os.path.exists(dirpath):
|
||||
os.makedirs(dirpath)
|
||||
|
||||
return data
|
||||
|
||||
with open(servers_info_path, "r") as stream:
|
||||
with contextlib.suppress(BaseException):
|
||||
data = json.load(stream)
|
||||
return data
|
||||
|
||||
|
||||
def add_server(url: str, username: str):
|
||||
"""Add server to server info metadata.
|
||||
|
||||
This function will also mark the url as the last used url on the machine,
so it will be used on the next launch.
|
||||
|
||||
Args:
|
||||
url (str): Server url.
|
||||
username (str): Name of user used to log in.
|
||||
"""
|
||||
|
||||
servers_info_path = _get_servers_path()
|
||||
data = get_servers_info_data()
|
||||
data["last_server"] = url
|
||||
if "urls" not in data:
|
||||
data["urls"] = {}
|
||||
data["urls"][url] = {
|
||||
"updated_dt": datetime.datetime.now().strftime("%Y/%m/%d %H:%M:%S"),
|
||||
"username": username,
|
||||
}
|
||||
|
||||
with open(servers_info_path, "w") as stream:
|
||||
json.dump(data, stream)
|
||||
|
||||
|
||||
def remove_server(url: str):
|
||||
"""Remove server url from servers information.
|
||||
|
||||
This should be used on logout to completely lose information about the server
|
||||
on the machine.
|
||||
|
||||
Args:
|
||||
url (str): Server url.
|
||||
"""
|
||||
|
||||
if not url:
|
||||
return
|
||||
|
||||
servers_info_path = _get_servers_path()
|
||||
data = get_servers_info_data()
|
||||
if data.get("last_server") == url:
|
||||
data["last_server"] = None
|
||||
|
||||
if "urls" in data:
|
||||
data["urls"].pop(url, None)
|
||||
|
||||
with open(servers_info_path, "w") as stream:
|
||||
json.dump(data, stream)
|
||||
|
||||
|
||||
def get_last_server(
|
||||
data: Optional[dict[str, Any]] = None
|
||||
) -> Union[str, None]:
|
||||
"""Last server used to log in on this machine.
|
||||
|
||||
Args:
|
||||
data (Optional[dict[str, Any]]): Prepared server information data.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Last used server url.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = get_servers_info_data()
|
||||
return data.get("last_server")
|
||||
|
||||
|
||||
def get_last_username_by_url(
|
||||
url: str,
|
||||
data: Optional[dict[str, Any]] = None
|
||||
) -> Union[str, None]:
|
||||
"""Get last username which was used for passed url.
|
||||
|
||||
Args:
|
||||
url (str): Server url.
|
||||
data (Optional[dict[str, Any]]): Servers info.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Username.
|
||||
"""
|
||||
|
||||
if not url:
|
||||
return None
|
||||
|
||||
if data is None:
|
||||
data = get_servers_info_data()
|
||||
|
||||
if urls := data.get("urls"):
|
||||
if url_info := urls.get(url):
|
||||
return url_info.get("username")
|
||||
return None
|
||||
|
||||
|
||||
def get_last_server_with_username():
|
||||
"""Receive last server and username used in last connection.
|
||||
|
||||
Returns:
|
||||
tuple[Union[str, None], Union[str, None]]: Url and username.
|
||||
"""
|
||||
|
||||
data = get_servers_info_data()
|
||||
url = get_last_server(data)
|
||||
username = get_last_username_by_url(url)
|
||||
return url, username
|
||||
|
||||
|
||||
class TokenKeyring:
|
||||
# Fake username with hardcoded username
|
||||
username_key = "username"
|
||||
|
||||
def __init__(self, url):
|
||||
try:
|
||||
import keyring
|
||||
|
||||
except Exception as exc:
|
||||
raise NotImplementedError(
|
||||
"Python module `keyring` is not available."
|
||||
) from exc
|
||||
|
||||
# hack for cx_freeze and Windows keyring backend
|
||||
if platform.system().lower() == "windows":
|
||||
from keyring.backends import Windows
|
||||
|
||||
keyring.set_keyring(Windows.WinVaultKeyring())
|
||||
|
||||
self._url = url
|
||||
self._keyring_key = f"AYON/{url}"
|
||||
|
||||
def get_value(self):
|
||||
import keyring
|
||||
|
||||
return keyring.get_password(self._keyring_key, self.username_key)
|
||||
|
||||
def set_value(self, value):
|
||||
import keyring
|
||||
|
||||
if value is not None:
|
||||
keyring.set_password(self._keyring_key, self.username_key, value)
|
||||
return
|
||||
|
||||
with contextlib.suppress(keyring.errors.PasswordDeleteError):
|
||||
keyring.delete_password(self._keyring_key, self.username_key)
|
||||
|
||||
|
||||
def load_token(url: str) -> Union[str, None]:
|
||||
"""Get token for url from keyring.
|
||||
|
||||
Args:
|
||||
url (str): Server url.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Token for passed url available in keyring.
|
||||
"""
|
||||
|
||||
return TokenKeyring(url).get_value()
|
||||
|
||||
|
||||
def store_token(url: str, token: str):
|
||||
"""Store token by url to keyring.
|
||||
|
||||
Args:
|
||||
url (str): Server url.
|
||||
token (str): User token to server.
|
||||
"""
|
||||
|
||||
TokenKeyring(url).set_value(token)
|
||||
|
||||
|
||||
def ask_to_login_ui(
|
||||
url: Optional[str] = None,
|
||||
always_on_top: Optional[bool] = False
|
||||
) -> tuple[str, str, str]:
|
||||
"""Ask user to login using UI.
|
||||
|
||||
This should be used only when user is not yet logged in at all or available
|
||||
credentials are invalid. To change credentials use 'change_user_ui'
|
||||
function.
|
||||
|
||||
Use a subprocess to show UI.
|
||||
|
||||
Args:
|
||||
url (Optional[str]): Server url that could be prefilled in UI.
|
||||
always_on_top (Optional[bool]): Window will be drawn on top of
|
||||
other windows.
|
||||
|
||||
Returns:
|
||||
tuple[str, str, str]: Url, user's token and username.
|
||||
"""
|
||||
|
||||
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
ui_dir = os.path.join(current_dir, "ui")
|
||||
|
||||
if url is None:
|
||||
url = get_last_server()
|
||||
username = get_last_username_by_url(url)
|
||||
data = {
|
||||
"url": url,
|
||||
"username": username,
|
||||
"always_on_top": always_on_top,
|
||||
}
|
||||
|
||||
with tempfile.NamedTemporaryFile(
|
||||
mode="w", prefix="ayon_login", suffix=".json", delete=False
|
||||
) as tmp:
|
||||
output = tmp.name
|
||||
json.dump(data, tmp)
|
||||
|
||||
code = subprocess.call(
|
||||
get_ayon_launch_args(ui_dir, "--skip-bootstrap", output))
|
||||
if code != 0:
|
||||
raise RuntimeError("Failed to show login UI")
|
||||
|
||||
with open(output, "r") as stream:
|
||||
data = json.load(stream)
|
||||
os.remove(output)
|
||||
return data["output"]
|
||||
|
||||
|
||||
def change_user_ui() -> ChangeUserResult:
|
||||
"""Change user using UI.
|
||||
|
||||
Show UI to the user where they can change credentials or url. Output will contain
all information about old/new values of url, username and api key, and whether
the user confirmed or declined the values.
|
||||
|
||||
Returns:
|
||||
ChangeUserResult: Information about user change.
|
||||
"""
|
||||
|
||||
from .ui import change_user
|
||||
|
||||
url, username = get_last_server_with_username()
|
||||
token = load_token(url)
|
||||
result = change_user(url, username, token)
|
||||
new_url, new_token, new_username, logged_out = result
|
||||
|
||||
output = ChangeUserResult(
|
||||
logged_out, url, token, username,
|
||||
new_url, new_token, new_username
|
||||
)
|
||||
if output.logged_out:
|
||||
logout(url, token)
|
||||
|
||||
elif output.token_changed:
|
||||
change_token(
|
||||
output.new_url,
|
||||
output.new_token,
|
||||
output.new_username,
|
||||
output.old_url
|
||||
)
|
||||
return output
|
||||
|
||||
|
||||
def change_token(
|
||||
url: str,
|
||||
token: str,
|
||||
username: Optional[str] = None,
|
||||
old_url: Optional[str] = None
|
||||
):
|
||||
"""Change url and token in currently running session.
|
||||
|
||||
Function can also change the server url; in that case, previous credentials are
|
||||
NOT removed from cache.
|
||||
|
||||
Args:
|
||||
url (str): Url to server.
|
||||
token (str): New token to be used for url connection.
|
||||
username (Optional[str]): Username of logged user.
|
||||
old_url (Optional[str]): Previous url. Value from 'get_last_server'
|
||||
is used if not entered.
|
||||
"""
|
||||
|
||||
if old_url is None:
|
||||
old_url = get_last_server()
|
||||
if old_url and old_url == url:
|
||||
remove_url_cache(old_url)
|
||||
|
||||
# TODO check if ayon_api is already connected
|
||||
add_server(url, username)
|
||||
store_token(url, token)
|
||||
ayon_api.change_token(url, token)
|
||||
|
||||
|
||||
def remove_url_cache(url: str):
|
||||
"""Clear cache for server url.
|
||||
|
||||
Args:
|
||||
url (str): Server url which is removed from cache.
|
||||
"""
|
||||
|
||||
store_token(url, None)
|
||||
|
||||
|
||||
def remove_token_cache(url: str, token: str):
|
||||
"""Remove token from local cache of url.
|
||||
|
||||
Is skipped if cached token under the passed url is not the same
|
||||
as passed token.
|
||||
|
||||
Args:
|
||||
url (str): Url to server.
|
||||
token (str): Token to be removed from url cache.
|
||||
"""
|
||||
|
||||
if load_token(url) == token:
|
||||
remove_url_cache(url)
|
||||
|
||||
|
||||
def logout(url: str, token: str):
|
||||
"""Logout from server and throw token away.
|
||||
|
||||
Args:
|
||||
url (str): Url from which should be logged out.
|
||||
token (str): Token which should be used to log out.
|
||||
"""
|
||||
|
||||
remove_server(url)
|
||||
ayon_api.close_connection()
|
||||
ayon_api.set_environments(None, None)
|
||||
remove_token_cache(url, token)
|
||||
logout_from_server(url, token)
|
||||
|
||||
|
||||
def load_environments():
|
||||
"""Load environments on startup.
|
||||
|
||||
Handle environments needed for connection with server. Environments are
|
||||
'AYON_SERVER_URL' and 'AYON_API_KEY'.
|
||||
|
||||
Server is looked up from environment. An already set environment is not
changed. If the environment is not filled, then the last server stored in appdirs
|
||||
is used.
|
||||
|
||||
Token is skipped if url is not available. Otherwise, is also checked from
|
||||
env and if is not available then uses 'load_token' to try to get token
|
||||
based on server url.
|
||||
"""
|
||||
|
||||
server_url = os.environ.get(SERVER_URL_ENV_KEY)
|
||||
if not server_url:
|
||||
server_url = get_last_server()
|
||||
if not server_url:
|
||||
return
|
||||
os.environ[SERVER_URL_ENV_KEY] = server_url
|
||||
|
||||
if not os.environ.get(SERVER_API_ENV_KEY):
|
||||
if token := load_token(server_url):
|
||||
os.environ[SERVER_API_ENV_KEY] = token
|
||||
|
||||
|
||||
def set_environments(url: str, token: str):
|
||||
"""Change url and token environemnts in currently running process.
|
||||
|
||||
Args:
|
||||
url (str): New server url.
|
||||
token (str): User's token.
|
||||
"""
|
||||
|
||||
ayon_api.set_environments(url, token)
|
||||
|
||||
|
||||
def create_global_connection():
|
||||
"""Create global connection with site id and client version.
|
||||
|
||||
Make sure the global connection in 'ayon_api' has entered site id and
|
||||
client version.
|
||||
|
||||
Set default settings variant to use based on 'is_staging_enabled'.
|
||||
"""
|
||||
|
||||
ayon_api.create_connection(
|
||||
get_local_site_id(), os.environ.get("AYON_VERSION")
|
||||
)
|
||||
ayon_api.set_default_settings_variant(
|
||||
"staging" if is_staging_enabled() else "production"
|
||||
)
|
||||
|
||||
|
||||
def need_server_or_login() -> bool:
|
||||
"""Check if server url or login to the server are needed.
|
||||
|
||||
It is recommended to call 'load_environments' on startup before this check.
|
||||
But in some cases this function could be called after startup.
|
||||
|
||||
Returns:
|
||||
bool: 'True' if server and token are available. Otherwise 'False'.
|
||||
"""
|
||||
|
||||
server_url = os.environ.get(SERVER_URL_ENV_KEY)
|
||||
if not server_url:
|
||||
return True
|
||||
|
||||
try:
|
||||
server_url = validate_url(server_url)
|
||||
except UrlError:
|
||||
return True
|
||||
|
||||
token = os.environ.get(SERVER_API_ENV_KEY)
|
||||
if token:
|
||||
return not is_token_valid(server_url, token)
|
||||
|
||||
token = load_token(server_url)
|
||||
if token:
|
||||
return not is_token_valid(server_url, token)
|
||||
return True
|
||||
|
||||
|
||||
def confirm_server_login(url, token, username):
|
||||
"""Confirm login of user and do necessary stepts to apply changes.
|
||||
|
||||
This should not be used on "change" of user but on first login.
|
||||
|
||||
Args:
|
||||
url (str): Server url where user authenticated.
|
||||
token (str): API token used for authentication to server.
|
||||
username (Union[str, None]): Username related to API token.
|
||||
"""
|
||||
|
||||
add_server(url, username)
|
||||
store_token(url, token)
|
||||
set_environments(url, token)
|
||||
create_global_connection()
|
||||
|
|
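Taken together with ayon_start.py above, these helpers are chained at startup roughly as follows; this is a condensed sketch of _connect_to_ayon_server, not a new API:

from ayon_common.connection.credentials import (
    load_environments,
    need_server_or_login,
    ask_to_login_ui,
    confirm_server_login,
    create_global_connection,
)


def connect_to_server():
    # Fill AYON_SERVER_URL / AYON_API_KEY from appdirs and keyring if possible.
    load_environments()
    if not need_server_or_login():
        create_global_connection()
        return
    # Otherwise ask the user through the subprocess login UI and persist
    # the confirmed url, token and username.
    url, token, username = ask_to_login_ui(always_on_top=True)
    if url is not None and token is not None:
        confirm_server_login(url, token, username)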
@@ -1,12 +0,0 @@
from .login_window import (
    ServerLoginWindow,
    ask_to_login,
    change_user,
)


__all__ = (
    "ServerLoginWindow",
    "ask_to_login",
    "change_user",
)
@@ -1,23 +0,0 @@
import sys
import json

from ayon_common.connection.ui.login_window import ask_to_login


def main(output_path):
    with open(output_path, "r") as stream:
        data = json.load(stream)

    url = data.get("url")
    username = data.get("username")
    always_on_top = data.get("always_on_top", False)
    out_url, out_token, out_username = ask_to_login(
        url, username, always_on_top=always_on_top)

    data["output"] = [out_url, out_token, out_username]
    with open(output_path, "w") as stream:
        json.dump(data, stream)


if __name__ == "__main__":
    main(sys.argv[-1])
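The caller side of this JSON handshake is ask_to_login_ui in credentials.py; a trimmed sketch of the exchange (the temp-file handling mirrors the code above, the ui_dir value is only illustrative):

import json
import subprocess
import tempfile

from ayon_common.utils import get_ayon_launch_args

ui_dir = "/path/to/ayon_common/connection/ui"  # illustrative location
with tempfile.NamedTemporaryFile(
    mode="w", prefix="ayon_login", suffix=".json", delete=False
) as tmp:
    json.dump({"url": None, "username": None, "always_on_top": True}, tmp)
    output = tmp.name

# The UI process reads the file, shows the login dialog and writes
# data["output"] = [url, token, username] back into the same file.
subprocess.call(get_ayon_launch_args(ui_dir, "--skip-bootstrap", output))
with open(output, "r") as stream:
    url, token, username = json.load(stream)["output"]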
@@ -1,710 +0,0 @@
|
|||
import traceback
|
||||
|
||||
from qtpy import QtWidgets, QtCore, QtGui
|
||||
|
||||
from ayon_api.exceptions import UrlError
|
||||
from ayon_api.utils import validate_url, login_to_server
|
||||
|
||||
from ayon_common.resources import (
|
||||
get_resource_path,
|
||||
get_icon_path,
|
||||
load_stylesheet,
|
||||
)
|
||||
from ayon_common.ui_utils import set_style_property, get_qt_app
|
||||
|
||||
from .widgets import (
|
||||
PressHoverButton,
|
||||
PlaceholderLineEdit,
|
||||
)
|
||||
|
||||
|
||||
class LogoutConfirmDialog(QtWidgets.QDialog):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
self.setWindowTitle("Logout confirmation")
|
||||
|
||||
message_widget = QtWidgets.QWidget(self)
|
||||
|
||||
message_label = QtWidgets.QLabel(
|
||||
(
|
||||
"You are going to logout. This action will close this"
|
||||
" application and will invalidate your login."
|
||||
" All other applications launched with this login won't be"
|
||||
" able to use it anymore.<br/><br/>"
|
||||
"You can cancel logout and only change server and user login"
|
||||
" in login dialog.<br/><br/>"
|
||||
"Press OK to confirm logout."
|
||||
),
|
||||
message_widget
|
||||
)
|
||||
message_label.setWordWrap(True)
|
||||
|
||||
message_layout = QtWidgets.QHBoxLayout(message_widget)
|
||||
message_layout.setContentsMargins(0, 0, 0, 0)
|
||||
message_layout.addWidget(message_label, 1)
|
||||
|
||||
sep_frame = QtWidgets.QFrame(self)
|
||||
sep_frame.setObjectName("Separator")
|
||||
sep_frame.setMinimumHeight(2)
|
||||
sep_frame.setMaximumHeight(2)
|
||||
|
||||
footer_widget = QtWidgets.QWidget(self)
|
||||
|
||||
cancel_btn = QtWidgets.QPushButton("Cancel", footer_widget)
|
||||
confirm_btn = QtWidgets.QPushButton("OK", footer_widget)
|
||||
|
||||
footer_layout = QtWidgets.QHBoxLayout(footer_widget)
|
||||
footer_layout.setContentsMargins(0, 0, 0, 0)
|
||||
footer_layout.addStretch(1)
|
||||
footer_layout.addWidget(cancel_btn, 0)
|
||||
footer_layout.addWidget(confirm_btn, 0)
|
||||
|
||||
main_layout = QtWidgets.QVBoxLayout(self)
|
||||
main_layout.addWidget(message_widget, 0)
|
||||
main_layout.addStretch(1)
|
||||
main_layout.addWidget(sep_frame, 0)
|
||||
main_layout.addWidget(footer_widget, 0)
|
||||
|
||||
cancel_btn.clicked.connect(self._on_cancel_click)
|
||||
confirm_btn.clicked.connect(self._on_confirm_click)
|
||||
|
||||
self._cancel_btn = cancel_btn
|
||||
self._confirm_btn = confirm_btn
|
||||
self._result = False
|
||||
|
||||
def showEvent(self, event):
|
||||
super().showEvent(event)
|
||||
self._match_btns_sizes()
|
||||
|
||||
def resizeEvent(self, event):
|
||||
super().resizeEvent(event)
|
||||
self._match_btns_sizes()
|
||||
|
||||
def _match_btns_sizes(self):
|
||||
width = max(
|
||||
self._cancel_btn.sizeHint().width(),
|
||||
self._confirm_btn.sizeHint().width()
|
||||
)
|
||||
self._cancel_btn.setMinimumWidth(width)
|
||||
self._confirm_btn.setMinimumWidth(width)
|
||||
|
||||
def _on_cancel_click(self):
|
||||
self._result = False
|
||||
self.reject()
|
||||
|
||||
def _on_confirm_click(self):
|
||||
self._result = True
|
||||
self.accept()
|
||||
|
||||
def get_result(self):
|
||||
return self._result
|
||||
|
||||
|
||||
class ServerLoginWindow(QtWidgets.QDialog):
|
||||
default_width = 410
|
||||
default_height = 170
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
icon_path = get_icon_path()
|
||||
icon = QtGui.QIcon(icon_path)
|
||||
self.setWindowIcon(icon)
|
||||
self.setWindowTitle("Login to server")
|
||||
|
||||
edit_icon_path = get_resource_path("edit.png")
|
||||
edit_icon = QtGui.QIcon(edit_icon_path)
|
||||
|
||||
# --- URL page ---
|
||||
login_widget = QtWidgets.QWidget(self)
|
||||
|
||||
user_cred_widget = QtWidgets.QWidget(login_widget)
|
||||
|
||||
url_label = QtWidgets.QLabel("URL:", user_cred_widget)
|
||||
|
||||
url_widget = QtWidgets.QWidget(user_cred_widget)
|
||||
|
||||
url_input = PlaceholderLineEdit(url_widget)
|
||||
url_input.setPlaceholderText("< https://ayon.server.com >")
|
||||
|
||||
url_preview = QtWidgets.QLineEdit(url_widget)
|
||||
url_preview.setReadOnly(True)
|
||||
url_preview.setObjectName("LikeDisabledInput")
|
||||
|
||||
url_edit_btn = PressHoverButton(user_cred_widget)
|
||||
url_edit_btn.setIcon(edit_icon)
|
||||
url_edit_btn.setObjectName("PasswordBtn")
|
||||
|
||||
url_layout = QtWidgets.QHBoxLayout(url_widget)
|
||||
url_layout.setContentsMargins(0, 0, 0, 0)
|
||||
url_layout.addWidget(url_input, 1)
|
||||
url_layout.addWidget(url_preview, 1)
|
||||
|
||||
# --- URL separator ---
|
||||
url_cred_sep = QtWidgets.QFrame(self)
|
||||
url_cred_sep.setObjectName("Separator")
|
||||
url_cred_sep.setMinimumHeight(2)
|
||||
url_cred_sep.setMaximumHeight(2)
|
||||
|
||||
# --- Login page ---
|
||||
username_label = QtWidgets.QLabel("Username:", user_cred_widget)
|
||||
|
||||
username_widget = QtWidgets.QWidget(user_cred_widget)
|
||||
|
||||
username_input = PlaceholderLineEdit(username_widget)
|
||||
username_input.setPlaceholderText("< Artist >")
|
||||
|
||||
username_preview = QtWidgets.QLineEdit(username_widget)
|
||||
username_preview.setReadOnly(True)
|
||||
username_preview.setObjectName("LikeDisabledInput")
|
||||
|
||||
username_edit_btn = PressHoverButton(user_cred_widget)
|
||||
username_edit_btn.setIcon(edit_icon)
|
||||
username_edit_btn.setObjectName("PasswordBtn")
|
||||
|
||||
username_layout = QtWidgets.QHBoxLayout(username_widget)
|
||||
username_layout.setContentsMargins(0, 0, 0, 0)
|
||||
username_layout.addWidget(username_input, 1)
|
||||
username_layout.addWidget(username_preview, 1)
|
||||
|
||||
password_label = QtWidgets.QLabel("Password:", user_cred_widget)
|
||||
password_input = PlaceholderLineEdit(user_cred_widget)
|
||||
password_input.setPlaceholderText("< *********** >")
|
||||
password_input.setEchoMode(PlaceholderLineEdit.Password)
|
||||
|
||||
api_label = QtWidgets.QLabel("API key:", user_cred_widget)
|
||||
api_preview = QtWidgets.QLineEdit(user_cred_widget)
|
||||
api_preview.setReadOnly(True)
|
||||
api_preview.setObjectName("LikeDisabledInput")
|
||||
|
||||
show_password_icon_path = get_resource_path("eye.png")
|
||||
show_password_icon = QtGui.QIcon(show_password_icon_path)
|
||||
show_password_btn = PressHoverButton(user_cred_widget)
|
||||
show_password_btn.setObjectName("PasswordBtn")
|
||||
show_password_btn.setIcon(show_password_icon)
|
||||
show_password_btn.setFocusPolicy(QtCore.Qt.ClickFocus)
|
||||
|
||||
cred_msg_sep = QtWidgets.QFrame(self)
|
||||
cred_msg_sep.setObjectName("Separator")
|
||||
cred_msg_sep.setMinimumHeight(2)
|
||||
cred_msg_sep.setMaximumHeight(2)
|
||||
|
||||
# --- Credentials inputs ---
|
||||
user_cred_layout = QtWidgets.QGridLayout(user_cred_widget)
|
||||
user_cred_layout.setContentsMargins(0, 0, 0, 0)
|
||||
row = 0
|
||||
|
||||
user_cred_layout.addWidget(url_label, row, 0, 1, 1)
|
||||
user_cred_layout.addWidget(url_widget, row, 1, 1, 1)
|
||||
user_cred_layout.addWidget(url_edit_btn, row, 2, 1, 1)
|
||||
row += 1
|
||||
|
||||
user_cred_layout.addWidget(url_cred_sep, row, 0, 1, 3)
|
||||
row += 1
|
||||
|
||||
user_cred_layout.addWidget(username_label, row, 0, 1, 1)
|
||||
user_cred_layout.addWidget(username_widget, row, 1, 1, 1)
|
||||
user_cred_layout.addWidget(username_edit_btn, row, 2, 2, 1)
|
||||
row += 1
|
||||
|
||||
user_cred_layout.addWidget(api_label, row, 0, 1, 1)
|
||||
user_cred_layout.addWidget(api_preview, row, 1, 1, 1)
|
||||
row += 1
|
||||
|
||||
user_cred_layout.addWidget(password_label, row, 0, 1, 1)
|
||||
user_cred_layout.addWidget(password_input, row, 1, 1, 1)
|
||||
user_cred_layout.addWidget(show_password_btn, row, 2, 1, 1)
|
||||
row += 1
|
||||
|
||||
user_cred_layout.addWidget(cred_msg_sep, row, 0, 1, 3)
|
||||
row += 1
|
||||
|
||||
user_cred_layout.setColumnStretch(0, 0)
|
||||
user_cred_layout.setColumnStretch(1, 1)
|
||||
user_cred_layout.setColumnStretch(2, 0)
|
||||
|
||||
login_layout = QtWidgets.QVBoxLayout(login_widget)
|
||||
login_layout.setContentsMargins(0, 0, 0, 0)
|
||||
login_layout.addWidget(user_cred_widget, 1)
|
||||
|
||||
# --- Messages ---
|
||||
# Messages for users (e.g. invalid url etc.)
|
||||
message_label = QtWidgets.QLabel(self)
|
||||
message_label.setWordWrap(True)
|
||||
message_label.setTextInteractionFlags(QtCore.Qt.TextBrowserInteraction)
|
||||
|
||||
footer_widget = QtWidgets.QWidget(self)
|
||||
logout_btn = QtWidgets.QPushButton("Logout", footer_widget)
|
||||
user_message = QtWidgets.QLabel(footer_widget)
|
||||
login_btn = QtWidgets.QPushButton("Login", footer_widget)
|
||||
confirm_btn = QtWidgets.QPushButton("Confirm", footer_widget)
|
||||
|
||||
footer_layout = QtWidgets.QHBoxLayout(footer_widget)
|
||||
footer_layout.setContentsMargins(0, 0, 0, 0)
|
||||
footer_layout.addWidget(logout_btn, 0)
|
||||
footer_layout.addWidget(user_message, 1)
|
||||
footer_layout.addWidget(login_btn, 0)
|
||||
footer_layout.addWidget(confirm_btn, 0)
|
||||
|
||||
main_layout = QtWidgets.QVBoxLayout(self)
|
||||
main_layout.addWidget(login_widget, 0)
|
||||
main_layout.addWidget(message_label, 0)
|
||||
main_layout.addStretch(1)
|
||||
main_layout.addWidget(footer_widget, 0)
|
||||
|
||||
url_input.textChanged.connect(self._on_url_change)
|
||||
url_input.returnPressed.connect(self._on_url_enter_press)
|
||||
username_input.textChanged.connect(self._on_user_change)
|
||||
username_input.returnPressed.connect(self._on_username_enter_press)
|
||||
password_input.returnPressed.connect(self._on_password_enter_press)
|
||||
show_password_btn.change_state.connect(self._on_show_password)
|
||||
url_edit_btn.clicked.connect(self._on_url_edit_click)
|
||||
username_edit_btn.clicked.connect(self._on_username_edit_click)
|
||||
logout_btn.clicked.connect(self._on_logout_click)
|
||||
login_btn.clicked.connect(self._on_login_click)
|
||||
confirm_btn.clicked.connect(self._on_login_click)
|
||||
|
||||
self._message_label = message_label
|
||||
|
||||
self._url_widget = url_widget
|
||||
self._url_input = url_input
|
||||
self._url_preview = url_preview
|
||||
self._url_edit_btn = url_edit_btn
|
||||
|
||||
self._login_widget = login_widget
|
||||
|
||||
self._user_cred_widget = user_cred_widget
|
||||
self._username_input = username_input
|
||||
self._username_preview = username_preview
|
||||
self._username_edit_btn = username_edit_btn
|
||||
|
||||
self._password_label = password_label
|
||||
self._password_input = password_input
|
||||
self._show_password_btn = show_password_btn
|
||||
self._api_label = api_label
|
||||
self._api_preview = api_preview
|
||||
|
||||
self._logout_btn = logout_btn
|
||||
self._user_message = user_message
|
||||
self._login_btn = login_btn
|
||||
self._confirm_btn = confirm_btn
|
||||
|
||||
self._url_is_valid = None
|
||||
self._credentials_are_valid = None
|
||||
self._result = (None, None, None, False)
|
||||
self._first_show = True
|
||||
|
||||
self._allow_logout = False
|
||||
self._logged_in = False
|
||||
self._url_edit_mode = False
|
||||
self._username_edit_mode = False
|
||||
|
||||
def set_allow_logout(self, allow_logout):
|
||||
if allow_logout is self._allow_logout:
|
||||
return
|
||||
self._allow_logout = allow_logout
|
||||
|
||||
self._update_states_by_edit_mode()
|
||||
|
||||
def _set_logged_in(self, logged_in):
|
||||
if logged_in is self._logged_in:
|
||||
return
|
||||
self._logged_in = logged_in
|
||||
|
||||
self._update_states_by_edit_mode()
|
||||
|
||||
def _set_url_edit_mode(self, edit_mode):
|
||||
if self._url_edit_mode is not edit_mode:
|
||||
self._url_edit_mode = edit_mode
|
||||
self._update_states_by_edit_mode()
|
||||
|
||||
def _set_username_edit_mode(self, edit_mode):
|
||||
if self._username_edit_mode is not edit_mode:
|
||||
self._username_edit_mode = edit_mode
|
||||
self._update_states_by_edit_mode()
|
||||
|
||||
def _get_url_user_edit(self):
|
||||
url_edit = True
|
||||
if self._logged_in and not self._url_edit_mode:
|
||||
url_edit = False
|
||||
user_edit = url_edit
|
||||
if not user_edit and self._logged_in and self._username_edit_mode:
|
||||
user_edit = True
|
||||
return url_edit, user_edit
|
||||
|
||||
def _update_states_by_edit_mode(self):
|
||||
url_edit, user_edit = self._get_url_user_edit()
|
||||
|
||||
self._url_preview.setVisible(not url_edit)
|
||||
self._url_input.setVisible(url_edit)
|
||||
self._url_edit_btn.setVisible(self._allow_logout and not url_edit)
|
||||
|
||||
self._username_preview.setVisible(not user_edit)
|
||||
self._username_input.setVisible(user_edit)
|
||||
self._username_edit_btn.setVisible(
|
||||
self._allow_logout and not user_edit
|
||||
)
|
||||
|
||||
self._api_preview.setVisible(not user_edit)
|
||||
self._api_label.setVisible(not user_edit)
|
||||
|
||||
self._password_label.setVisible(user_edit)
|
||||
self._show_password_btn.setVisible(user_edit)
|
||||
self._password_input.setVisible(user_edit)
|
||||
|
||||
self._logout_btn.setVisible(self._allow_logout and self._logged_in)
|
||||
self._login_btn.setVisible(not self._allow_logout)
|
||||
self._confirm_btn.setVisible(self._allow_logout)
|
||||
self._update_login_btn_state(url_edit, user_edit)
|
||||
|
||||
def _update_login_btn_state(self, url_edit=None, user_edit=None, url=None):
|
||||
if url_edit is None:
|
||||
url_edit, user_edit = self._get_url_user_edit()
|
||||
|
||||
if url is None:
|
||||
url = self._url_input.text()
|
||||
|
||||
enabled = bool(url) and (url_edit or user_edit)
|
||||
|
||||
self._login_btn.setEnabled(enabled)
|
||||
self._confirm_btn.setEnabled(enabled)
|
||||
|
||||
def showEvent(self, event):
|
||||
super().showEvent(event)
|
||||
if self._first_show:
|
||||
self._first_show = False
|
||||
self._on_first_show()
|
||||
|
||||
def _on_first_show(self):
|
||||
self.setStyleSheet(load_stylesheet())
|
||||
self.resize(self.default_width, self.default_height)
|
||||
self._center_window()
|
||||
if self._allow_logout is None:
|
||||
self.set_allow_logout(False)
|
||||
|
||||
self._update_states_by_edit_mode()
|
||||
if not self._url_input.text():
|
||||
widget = self._url_input
|
||||
elif not self._username_input.text():
|
||||
widget = self._username_input
|
||||
else:
|
||||
widget = self._password_input
|
||||
|
||||
self._set_input_focus(widget)
|
||||
|
||||
def result(self):
|
||||
"""Result url and token or login.
|
||||
|
||||
Returns:
|
||||
Union[Tuple[str, str], Tuple[None, None]]: Url and token used for
login if it was successful, otherwise both are set to None.
|
||||
"""
|
||||
return self._result
|
||||
|
||||
def _center_window(self):
|
||||
"""Move window to center of screen."""
|
||||
|
||||
if hasattr(QtWidgets.QApplication, "desktop"):
|
||||
desktop = QtWidgets.QApplication.desktop()
|
||||
screen_idx = desktop.screenNumber(self)
|
||||
screen_geo = desktop.screenGeometry(screen_idx)
|
||||
else:
|
||||
screen = self.screen()
|
||||
screen_geo = screen.geometry()
|
||||
|
||||
geo = self.frameGeometry()
|
||||
geo.moveCenter(screen_geo.center())
|
||||
if geo.y() < screen_geo.y():
|
||||
geo.setY(screen_geo.y())
|
||||
self.move(geo.topLeft())
|
||||
|
||||
def _on_url_change(self, text):
|
||||
self._update_login_btn_state(url=text)
|
||||
self._set_url_valid(None)
|
||||
self._set_credentials_valid(None)
|
||||
self._url_preview.setText(text)
|
||||
|
||||
def _set_url_valid(self, valid):
|
||||
if valid is self._url_is_valid:
|
||||
return
|
||||
|
||||
self._url_is_valid = valid
|
||||
self._set_input_valid_state(self._url_input, valid)
|
||||
|
||||
def _set_credentials_valid(self, valid):
|
||||
if self._credentials_are_valid is valid:
|
||||
return
|
||||
|
||||
self._credentials_are_valid = valid
|
||||
self._set_input_valid_state(self._username_input, valid)
|
||||
self._set_input_valid_state(self._password_input, valid)
|
||||
|
||||
def _on_url_enter_press(self):
|
||||
self._set_input_focus(self._username_input)
|
||||
|
||||
def _on_user_change(self, username):
|
||||
self._username_preview.setText(username)
|
||||
|
||||
def _on_username_enter_press(self):
|
||||
self._set_input_focus(self._password_input)
|
||||
|
||||
def _on_password_enter_press(self):
|
||||
self._login()
|
||||
|
||||
def _on_show_password(self, show_password):
|
||||
if show_password:
|
||||
placeholder_text = "< MySecret124 >"
|
||||
echo_mode = QtWidgets.QLineEdit.Normal
|
||||
else:
|
||||
placeholder_text = "< *********** >"
|
||||
echo_mode = QtWidgets.QLineEdit.Password
|
||||
|
||||
self._password_input.setEchoMode(echo_mode)
|
||||
self._password_input.setPlaceholderText(placeholder_text)
|
||||
|
||||
def _on_username_edit_click(self):
|
||||
self._username_edit_mode = True
|
||||
self._update_states_by_edit_mode()
|
||||
|
||||
def _on_url_edit_click(self):
|
||||
self._url_edit_mode = True
|
||||
self._update_states_by_edit_mode()
|
||||
|
||||
def _on_logout_click(self):
|
||||
dialog = LogoutConfirmDialog(self)
|
||||
dialog.exec_()
|
||||
if dialog.get_result():
|
||||
self._result = (None, None, None, True)
|
||||
self.accept()
|
||||
|
||||
def _on_login_click(self):
|
||||
self._login()
|
||||
|
||||
def _validate_url(self):
|
||||
"""Use url from input to connect and change window state on success.
|
||||
|
||||
Todos:
|
||||
Threaded check.
|
||||
"""
|
||||
|
||||
url = self._url_input.text()
|
||||
valid_url = None
|
||||
try:
|
||||
valid_url = validate_url(url)
|
||||
|
||||
except UrlError as exc:
|
||||
parts = [f"<b>{exc.title}</b>"]
|
||||
parts.extend(f"- {hint}" for hint in exc.hints)
|
||||
self._set_message("<br/>".join(parts))
|
||||
|
||||
except KeyboardInterrupt:
|
||||
# Reraise KeyboardInterrupt error
|
||||
raise
|
||||
|
||||
except BaseException:
|
||||
self._set_unexpected_error()
|
||||
return
|
||||
|
||||
if valid_url is None:
|
||||
return False
|
||||
|
||||
self._url_input.setText(valid_url)
|
||||
return True
|
||||
|
||||
def _login(self):
|
||||
if (
|
||||
not self._login_btn.isEnabled()
|
||||
and not self._confirm_btn.isEnabled()
|
||||
):
|
||||
return
|
||||
|
||||
if not self._url_is_valid:
|
||||
self._set_url_valid(self._validate_url())
|
||||
|
||||
if not self._url_is_valid:
|
||||
self._set_input_focus(self._url_input)
|
||||
self._set_credentials_valid(None)
|
||||
return
|
||||
|
||||
self._clear_message()
|
||||
|
||||
url = self._url_input.text()
|
||||
username = self._username_input.text()
|
||||
password = self._password_input.text()
|
||||
try:
|
||||
token = login_to_server(url, username, password)
|
||||
except BaseException:
|
||||
self._set_unexpected_error()
|
||||
return
|
||||
|
||||
if token is not None:
|
||||
self._result = (url, token, username, False)
|
||||
self.accept()
|
||||
return
|
||||
|
||||
self._set_credentials_valid(False)
|
||||
message_lines = ["<b>Invalid credentials</b>"]
|
||||
if not username.strip():
|
||||
message_lines.append("- Username is not filled")
|
||||
|
||||
if not password.strip():
|
||||
message_lines.append("- Password is not filled")
|
||||
|
||||
if username and password:
|
||||
message_lines.append("- Check your credentials")
|
||||
|
||||
self._set_message("<br/>".join(message_lines))
|
||||
self._set_input_focus(self._username_input)
|
||||
|
||||
def _set_input_focus(self, widget):
|
||||
widget.setFocus(QtCore.Qt.MouseFocusReason)
|
||||
|
||||
def _set_input_valid_state(self, widget, valid):
|
||||
state = ""
|
||||
if valid is True:
|
||||
state = "valid"
|
||||
elif valid is False:
|
||||
state = "invalid"
|
||||
set_style_property(widget, "state", state)
|
||||
|
||||
def _set_message(self, message):
|
||||
self._message_label.setText(message)
|
||||
|
||||
def _clear_message(self):
|
||||
self._message_label.setText("")
|
||||
|
||||
def _set_unexpected_error(self):
|
||||
# TODO add traceback somewhere
|
||||
# - maybe a button to show or copy?
|
||||
traceback.print_exc()
|
||||
lines = [
|
||||
"<b>Unexpected error happened</b>",
|
||||
"- Can be caused by wrong url (leading elsewhere)"
|
||||
]
|
||||
self._set_message("<br/>".join(lines))
|
||||
|
||||
def set_url(self, url):
|
||||
self._url_preview.setText(url)
|
||||
self._url_input.setText(url)
|
||||
self._validate_url()
|
||||
|
||||
def set_username(self, username):
|
||||
self._username_preview.setText(username)
|
||||
self._username_input.setText(username)
|
||||
|
||||
def _set_api_key(self, api_key):
|
||||
if not api_key or len(api_key) < 3:
|
||||
self._api_preview.setText(api_key or "")
|
||||
return
|
||||
|
||||
api_key_len = len(api_key)
|
||||
offset = 6
|
||||
if api_key_len < offset:
|
||||
offset = api_key_len // 2
|
||||
api_key = api_key[:offset] + "." * (api_key_len - offset)
|
||||
|
||||
self._api_preview.setText(api_key)
|
||||
|
||||
def set_logged_in(
|
||||
self,
|
||||
logged_in,
|
||||
url=None,
|
||||
username=None,
|
||||
api_key=None,
|
||||
allow_logout=None
|
||||
):
|
||||
if url is not None:
|
||||
self.set_url(url)
|
||||
|
||||
if username is not None:
|
||||
self.set_username(username)
|
||||
|
||||
if api_key:
|
||||
self._set_api_key(api_key)
|
||||
|
||||
if logged_in and allow_logout is None:
|
||||
allow_logout = True
|
||||
|
||||
self._set_logged_in(logged_in)
|
||||
|
||||
if allow_logout:
|
||||
self.set_allow_logout(True)
|
||||
elif allow_logout is False:
|
||||
self.set_allow_logout(False)
|
||||
|
||||
|
||||
def ask_to_login(url=None, username=None, always_on_top=False):
|
||||
"""Ask user to login using Qt dialog.
|
||||
|
||||
Function creates new QApplication if is not created yet.
|
||||
|
||||
Args:
|
||||
url (Optional[str]): Server url that will be prefilled in dialog.
|
||||
username (Optional[str]): Username that will be prefilled in dialog.
|
||||
always_on_top (Optional[bool]): Window will be drawn on top of
|
||||
other windows.
|
||||
|
||||
Returns:
|
||||
tuple[str, str, str]: Returns Url, user's token and username. Url can
|
||||
be changed during the dialog's lifetime, which is why the url is returned.
|
||||
"""
|
||||
|
||||
app_instance = get_qt_app()
|
||||
|
||||
window = ServerLoginWindow()
|
||||
if always_on_top:
|
||||
window.setWindowFlags(
|
||||
window.windowFlags()
|
||||
| QtCore.Qt.WindowStaysOnTopHint
|
||||
)
|
||||
|
||||
if url:
|
||||
window.set_url(url)
|
||||
|
||||
if username:
|
||||
window.set_username(username)
|
||||
|
||||
if not app_instance.startingUp():
|
||||
window.exec_()
|
||||
else:
|
||||
window.open()
|
||||
app_instance.exec_()
|
||||
result = window.result()
|
||||
out_url, out_token, out_username, _ = result
|
||||
return out_url, out_token, out_username
|
||||
|
||||
|
||||
def change_user(url, username, api_key, always_on_top=False):
|
||||
"""Ask user to login using Qt dialog.
|
||||
|
||||
Function creates new QApplication if is not created yet.
|
||||
|
||||
Args:
|
||||
url (str): Server url that will be prefilled in dialog.
|
||||
username (str): Username that will be prefilled in dialog.
|
||||
api_key (str): API key that will be prefilled in dialog.
|
||||
always_on_top (Optional[bool]): Window will be drawn on top of
|
||||
other windows.
|
||||
|
||||
Returns:
|
||||
Tuple[str, str]: Returns Url and user's token. Url can be changed
|
||||
during the dialog's lifetime, which is why the url is returned.
|
||||
"""
|
||||
|
||||
app_instance = get_qt_app()
|
||||
window = ServerLoginWindow()
|
||||
if always_on_top:
|
||||
window.setWindowFlags(
|
||||
window.windowFlags()
|
||||
| QtCore.Qt.WindowStaysOnTopHint
|
||||
)
|
||||
window.set_logged_in(True, url, username, api_key)
|
||||
|
||||
if not app_instance.startingUp():
|
||||
window.exec_()
|
||||
else:
|
||||
window.open()
|
||||
# This can become main Qt loop. Maybe should live elsewhere
|
||||
app_instance.exec_()
|
||||
return window.result()
|
||||
|
|
@@ -1,47 +0,0 @@
from qtpy import QtWidgets, QtCore, QtGui


class PressHoverButton(QtWidgets.QPushButton):
    """Keep track about mouse press/release and enter/leave."""

    _mouse_pressed = False
    _mouse_hovered = False
    change_state = QtCore.Signal(bool)

    def mousePressEvent(self, event):
        self._mouse_pressed = True
        self._mouse_hovered = True
        self.change_state.emit(self._mouse_hovered)
        super(PressHoverButton, self).mousePressEvent(event)

    def mouseReleaseEvent(self, event):
        self._mouse_pressed = False
        self._mouse_hovered = False
        self.change_state.emit(self._mouse_hovered)
        super(PressHoverButton, self).mouseReleaseEvent(event)

    def mouseMoveEvent(self, event):
        mouse_pos = self.mapFromGlobal(QtGui.QCursor.pos())
        under_mouse = self.rect().contains(mouse_pos)
        if under_mouse != self._mouse_hovered:
            self._mouse_hovered = under_mouse
            self.change_state.emit(self._mouse_hovered)

        super(PressHoverButton, self).mouseMoveEvent(event)


class PlaceholderLineEdit(QtWidgets.QLineEdit):
    """Set placeholder color of QLineEdit in Qt 5.12 and higher."""

    def __init__(self, *args, **kwargs):
        super(PlaceholderLineEdit, self).__init__(*args, **kwargs)
        # Change placeholder palette color
        if hasattr(QtGui.QPalette, "PlaceholderText"):
            filter_palette = self.palette()
            color = QtGui.QColor("#D3D8DE")
            color.setAlpha(67)
            filter_palette.setColor(
                QtGui.QPalette.PlaceholderText,
                color
            )
            self.setPalette(filter_palette)
@ -1,18 +0,0 @@
|
|||
Addon distribution tool
------------------------

Code in this folder is the backend portion of the addon distribution logic for the v4 server.

Each host and module will become a separate addon in the future. Each v4 server can run a different set of addons.

In the first step, the client (running on the artist machine) asks the v4 server for the list of enabled addons.
(It expects a list of json documents matching the `addon_distribution.py:AddonInfo` object.)
Next it checks whether each enabled addon version is present in the local folder. When a version of
an addon is missing, the client uses the information in the addon to download a zip file (from http/shared local disk/git)
and unzip it, as sketched in the example after this section.

A required part of addon distribution will be the sharing of dependencies (python libraries, utilities), which is not part of this folder.

The location of this folder might change in the future, as a client will need to add this folder to sys.path reliably.

This code needs to be as independent of OpenPype code as possible!
|
||||
|
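Below is a minimal sketch of that flow, not the actual client implementation. The `/api/addons` endpoint, the `<addons_dir>/<name>_<version>` on-disk layout and the restriction to the "http" source type are illustrative assumptions.

    import json
    import os
    import zipfile
    import urllib.request


    def distribute_addons(server_url, addons_dir):
        """Download and unzip enabled addons that are missing locally."""
        # Ask the server for the list of enabled addons
        # (list of AddonInfo-like json documents).
        with urllib.request.urlopen(f"{server_url}/api/addons") as response:
            addons = json.loads(response.read())

        for addon in addons:
            for version, version_data in addon.get("versions", {}).items():
                target_dir = os.path.join(
                    addons_dir, "{}_{}".format(addon["name"], version)
                )
                # Skip versions that are already present in the local folder.
                if os.path.isdir(target_dir):
                    continue

                # Use the source information to download the zip file.
                # Only the "http" source type is handled in this sketch.
                for source in version_data.get("clientSourceInfo") or []:
                    if source.get("type") != "http":
                        continue
                    zip_path = target_dir + ".zip"
                    urllib.request.urlretrieve(source["path"], zip_path)

                    # Unzip next to other addons and remove the archive.
                    with zipfile.ZipFile(zip_path) as archive:
                        archive.extractall(target_dir)
                    os.remove(zip_path)
                    break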
|
@ -1,9 +0,0 @@
|
|||
from .control import AyonDistribution, BundleNotFoundError
|
||||
from .utils import show_missing_bundle_information
|
||||
|
||||
|
||||
__all__ = (
|
||||
"AyonDistribution",
|
||||
"BundleNotFoundError",
|
||||
"show_missing_bundle_information",
|
||||
)
|
||||
File diff suppressed because it is too large
|
|
@ -1,265 +0,0 @@
|
|||
import attr
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class UrlType(Enum):
|
||||
HTTP = "http"
|
||||
GIT = "git"
|
||||
FILESYSTEM = "filesystem"
|
||||
SERVER = "server"
|
||||
|
||||
|
||||
@attr.s
|
||||
class MultiPlatformValue(object):
|
||||
windows = attr.ib(default=None)
|
||||
linux = attr.ib(default=None)
|
||||
darwin = attr.ib(default=None)
|
||||
|
||||
|
||||
@attr.s
|
||||
class SourceInfo(object):
|
||||
type = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class LocalSourceInfo(SourceInfo):
|
||||
path = attr.ib(default=attr.Factory(MultiPlatformValue))
|
||||
|
||||
|
||||
@attr.s
|
||||
class WebSourceInfo(SourceInfo):
|
||||
url = attr.ib(default=None)
|
||||
headers = attr.ib(default=None)
|
||||
filename = attr.ib(default=None)
|
||||
|
||||
|
||||
@attr.s
|
||||
class ServerSourceInfo(SourceInfo):
|
||||
filename = attr.ib(default=None)
|
||||
path = attr.ib(default=None)
|
||||
|
||||
|
||||
def convert_source(source):
|
||||
"""Create source object from data information.
|
||||
|
||||
Args:
|
||||
source (Dict[str, any]): Information about source.
|
||||
|
||||
Returns:
|
||||
Union[None, SourceInfo]: Object with source information if type is
|
||||
known.
|
||||
"""
|
||||
|
||||
source_type = source.get("type")
|
||||
if not source_type:
|
||||
return None
|
||||
|
||||
if source_type == UrlType.FILESYSTEM.value:
|
||||
return LocalSourceInfo(
|
||||
type=source_type,
|
||||
path=source["path"]
|
||||
)
|
||||
|
||||
if source_type == UrlType.HTTP.value:
|
||||
url = source["path"]
|
||||
return WebSourceInfo(
|
||||
type=source_type,
|
||||
url=url,
|
||||
headers=source.get("headers"),
|
||||
filename=source.get("filename")
|
||||
)
|
||||
|
||||
if source_type == UrlType.SERVER.value:
|
||||
return ServerSourceInfo(
|
||||
type=source_type,
|
||||
filename=source.get("filename"),
|
||||
path=source.get("path")
|
||||
)
|
||||
|
||||
|
||||
def prepare_sources(src_sources):
|
||||
sources = []
|
||||
unknown_sources = []
|
||||
for source in (src_sources or []):
|
||||
dependency_source = convert_source(source)
|
||||
if dependency_source is not None:
|
||||
sources.append(dependency_source)
|
||||
else:
|
||||
print(f"Unknown source {source.get('type')}")
|
||||
unknown_sources.append(source)
|
||||
return sources, unknown_sources
|
||||
|
||||
|
||||
@attr.s
|
||||
class VersionData(object):
|
||||
version_data = attr.ib(default=None)
|
||||
|
||||
|
||||
@attr.s
|
||||
class AddonVersionInfo(object):
|
||||
version = attr.ib()
|
||||
full_name = attr.ib()
|
||||
title = attr.ib(default=None)
|
||||
require_distribution = attr.ib(default=False)
|
||||
sources = attr.ib(default=attr.Factory(list))
|
||||
unknown_sources = attr.ib(default=attr.Factory(list))
|
||||
hash = attr.ib(default=None)
|
||||
|
||||
@classmethod
|
||||
def from_dict(
|
||||
cls, addon_name, addon_title, addon_version, version_data
|
||||
):
|
||||
"""Addon version info.
|
||||
|
||||
Args:
|
||||
addon_name (str): Name of addon.
|
||||
addon_title (str): Title of addon.
|
||||
addon_version (str): Version of addon.
|
||||
version_data (dict[str, Any]): Addon version information from
|
||||
server.
|
||||
|
||||
Returns:
|
||||
AddonVersionInfo: Addon version info.
|
||||
"""
|
||||
|
||||
full_name = f"{addon_name}_{addon_version}"
|
||||
title = f"{addon_title} {addon_version}"
|
||||
|
||||
source_info = version_data.get("clientSourceInfo")
|
||||
require_distribution = source_info is not None
|
||||
sources, unknown_sources = prepare_sources(source_info)
|
||||
|
||||
return cls(
|
||||
version=addon_version,
|
||||
full_name=full_name,
|
||||
require_distribution=require_distribution,
|
||||
sources=sources,
|
||||
unknown_sources=unknown_sources,
|
||||
hash=version_data.get("hash"),
|
||||
title=title
|
||||
)
|
||||
|
||||
|
||||
@attr.s
|
||||
class AddonInfo(object):
|
||||
"""Object matching json payload from Server"""
|
||||
name = attr.ib()
|
||||
versions = attr.ib(default=attr.Factory(dict))
|
||||
title = attr.ib(default=None)
|
||||
description = attr.ib(default=None)
|
||||
license = attr.ib(default=None)
|
||||
authors = attr.ib(default=None)
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data):
|
||||
"""Addon info by available versions.
|
||||
|
||||
Args:
|
||||
data (dict[str, Any]): Addon information from server. Should
|
||||
contain information about every version under 'versions'.
|
||||
|
||||
Returns:
|
||||
AddonInfo: Addon info with available versions.
|
||||
"""
|
||||
|
||||
# server payload contains info about all versions
|
||||
addon_name = data["name"]
|
||||
title = data.get("title") or addon_name
|
||||
|
||||
src_versions = data.get("versions") or {}
|
||||
dst_versions = {
|
||||
addon_version: AddonVersionInfo.from_dict(
|
||||
addon_name, title, addon_version, version_data
|
||||
)
|
||||
for addon_version, version_data in src_versions.items()
|
||||
}
|
||||
return cls(
|
||||
name=addon_name,
|
||||
versions=dst_versions,
|
||||
description=data.get("description"),
|
||||
title=data.get("title") or addon_name,
|
||||
license=data.get("license"),
|
||||
authors=data.get("authors")
|
||||
)
|
||||
|
||||
|
||||
@attr.s
|
||||
class DependencyItem(object):
|
||||
"""Object matching payload from Server about single dependency package"""
|
||||
name = attr.ib()
|
||||
platform_name = attr.ib()
|
||||
checksum = attr.ib()
|
||||
sources = attr.ib(default=attr.Factory(list))
|
||||
unknown_sources = attr.ib(default=attr.Factory(list))
|
||||
source_addons = attr.ib(default=attr.Factory(dict))
|
||||
python_modules = attr.ib(default=attr.Factory(dict))
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, package):
|
||||
src_sources = package.get("sources") or []
|
||||
for source in src_sources:
|
||||
if source.get("type") == "server" and not source.get("filename"):
|
||||
source["filename"] = package["filename"]
|
||||
sources, unknown_sources = prepare_sources(src_sources)
|
||||
return cls(
|
||||
name=package["filename"],
|
||||
platform_name=package["platform"],
|
||||
sources=sources,
|
||||
unknown_sources=unknown_sources,
|
||||
checksum=package["checksum"],
|
||||
source_addons=package["sourceAddons"],
|
||||
python_modules=package["pythonModules"]
|
||||
)
|
||||
|
||||
|
||||
@attr.s
|
||||
class Installer:
|
||||
version = attr.ib()
|
||||
filename = attr.ib()
|
||||
platform_name = attr.ib()
|
||||
size = attr.ib()
|
||||
checksum = attr.ib()
|
||||
python_version = attr.ib()
|
||||
python_modules = attr.ib()
|
||||
sources = attr.ib(default=attr.Factory(list))
|
||||
unknown_sources = attr.ib(default=attr.Factory(list))
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, installer_info):
|
||||
sources, unknown_sources = prepare_sources(
|
||||
installer_info.get("sources"))
|
||||
|
||||
return cls(
|
||||
version=installer_info["version"],
|
||||
filename=installer_info["filename"],
|
||||
platform_name=installer_info["platform"],
|
||||
size=installer_info["size"],
|
||||
sources=sources,
|
||||
unknown_sources=unknown_sources,
|
||||
checksum=installer_info["checksum"],
|
||||
python_version=installer_info["pythonVersion"],
|
||||
python_modules=installer_info["pythonModules"]
|
||||
)
|
||||
|
||||
|
||||
@attr.s
|
||||
class Bundle:
|
||||
"""Class representing bundle information."""
|
||||
|
||||
name = attr.ib()
|
||||
installer_version = attr.ib()
|
||||
addon_versions = attr.ib(default=attr.Factory(dict))
|
||||
dependency_packages = attr.ib(default=attr.Factory(dict))
|
||||
is_production = attr.ib(default=False)
|
||||
is_staging = attr.ib(default=False)
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data):
|
||||
return cls(
|
||||
name=data["name"],
|
||||
installer_version=data.get("installerVersion"),
|
||||
addon_versions=data.get("addons", {}),
|
||||
dependency_packages=data.get("dependencyPackages", {}),
|
||||
is_production=data["isProduction"],
|
||||
is_staging=data["isStaging"],
|
||||
)
|
||||
|
|
@ -1,250 +0,0 @@
|
|||
import os
|
||||
import logging
|
||||
import platform
|
||||
from abc import ABCMeta, abstractmethod
|
||||
|
||||
import ayon_api
|
||||
|
||||
from .file_handler import RemoteFileHandler
|
||||
from .data_structures import UrlType
|
||||
|
||||
|
||||
class SourceDownloader(metaclass=ABCMeta):
|
||||
"""Abstract class for source downloader."""
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def download(cls, source, destination_dir, data, transfer_progress):
|
||||
"""Returns url of downloaded addon zip file.
|
||||
|
||||
Transfer progress can be ignored; in that case file transfer won't
|
||||
be shown as 0-100% but as 'running'. First step should be to set
|
||||
destination content size and then add transferred chunk sizes.
|
||||
|
||||
Args:
|
||||
source (dict): Source information, e.g. {"type": "http", "url": "https://...", ...}
|
||||
destination_dir (str): local folder to unzip
|
||||
data (dict): More information about download content. Always have
|
||||
'type' key in.
|
||||
transfer_progress (ayon_api.TransferProgress): Progress of
|
||||
transferred (copy/download) content.
|
||||
|
||||
Returns:
|
||||
str: Local path to addon zip file.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def cleanup(cls, source, destination_dir, data):
|
||||
"""Cleanup files when distribution finishes or crashes.
|
||||
|
||||
Cleanup e.g. temporary files (downloaded zip) or other related stuff
|
||||
to downloader.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def check_hash(cls, addon_path, addon_hash, hash_type="sha256"):
|
||||
"""Compares 'hash' of downloaded 'addon_url' file.
|
||||
|
||||
Args:
|
||||
addon_path (str): Local path to addon file.
|
||||
addon_hash (str): Hash of downloaded file.
|
||||
hash_type (str): Type of hash.
|
||||
|
||||
Raises:
|
||||
ValueError: If hashes don't match.
|
||||
"""
|
||||
|
||||
if not os.path.exists(addon_path):
|
||||
raise ValueError(f"{addon_path} doesn't exist.")
|
||||
if not RemoteFileHandler.check_integrity(
|
||||
addon_path, addon_hash, hash_type=hash_type
|
||||
):
|
||||
raise ValueError(f"{addon_path} doesn't match expected hash.")
|
||||
|
||||
@classmethod
|
||||
def unzip(cls, addon_zip_path, destination_dir):
|
||||
"""Unzips local 'addon_zip_path' to 'destination'.
|
||||
|
||||
Args:
|
||||
addon_zip_path (str): local path to addon zip file
|
||||
destination_dir (str): local folder to unzip
|
||||
"""
|
||||
|
||||
RemoteFileHandler.unzip(addon_zip_path, destination_dir)
|
||||
os.remove(addon_zip_path)
|
||||
|
||||
|
||||
class OSDownloader(SourceDownloader):
|
||||
"""Downloader using files from file drive."""
|
||||
|
||||
@classmethod
|
||||
def download(cls, source, destination_dir, data, transfer_progress):
|
||||
# OS doesn't need to download, unzip directly
|
||||
addon_url = source["path"].get(platform.system().lower())
|
||||
if not os.path.exists(addon_url):
|
||||
raise ValueError(f"{addon_url} is not accessible")
|
||||
return addon_url
|
||||
|
||||
@classmethod
|
||||
def cleanup(cls, source, destination_dir, data):
|
||||
# Nothing to do - download does not copy anything
|
||||
pass
|
||||
|
||||
|
||||
class HTTPDownloader(SourceDownloader):
|
||||
"""Downloader using http or https protocol."""
|
||||
|
||||
CHUNK_SIZE = 100000
|
||||
|
||||
@staticmethod
|
||||
def get_filename(source):
|
||||
source_url = source["url"]
|
||||
filename = source.get("filename")
|
||||
if not filename:
|
||||
filename = os.path.basename(source_url)
|
||||
basename, ext = os.path.splitext(filename)
|
||||
allowed_exts = set(RemoteFileHandler.IMPLEMENTED_ZIP_FORMATS)
|
||||
if ext.lower().lstrip(".") not in allowed_exts:
|
||||
filename = f"{basename}.zip"
|
||||
return filename
|
||||
|
||||
@classmethod
|
||||
def download(cls, source, destination_dir, data, transfer_progress):
|
||||
source_url = source["url"]
|
||||
cls.log.debug(f"Downloading {source_url} to {destination_dir}")
|
||||
headers = source.get("headers")
|
||||
filename = cls.get_filename(source)
|
||||
|
||||
# TODO use transfer progress
|
||||
RemoteFileHandler.download_url(
|
||||
source_url,
|
||||
destination_dir,
|
||||
filename,
|
||||
headers=headers
|
||||
)
|
||||
|
||||
return os.path.join(destination_dir, filename)
|
||||
|
||||
@classmethod
|
||||
def cleanup(cls, source, destination_dir, data):
|
||||
filename = cls.get_filename(source)
|
||||
filepath = os.path.join(destination_dir, filename)
|
||||
if os.path.exists(filepath) and os.path.isfile(filepath):
|
||||
os.remove(filepath)
|
||||
|
||||
|
||||
class AyonServerDownloader(SourceDownloader):
|
||||
"""Downloads static resource file from AYON Server.
|
||||
|
||||
Expects filled env var AYON_SERVER_URL.
|
||||
"""
|
||||
|
||||
CHUNK_SIZE = 8192
|
||||
|
||||
@classmethod
|
||||
def download(cls, source, destination_dir, data, transfer_progress):
|
||||
path = source["path"]
|
||||
filename = source["filename"]
|
||||
if path and not filename:
|
||||
filename = path.split("/")[-1]
|
||||
|
||||
cls.log.debug(f"Downloading {filename} to {destination_dir}")
|
||||
|
||||
_, ext = os.path.splitext(filename)
|
||||
ext = ext.lower().lstrip(".")
|
||||
valid_exts = set(RemoteFileHandler.IMPLEMENTED_ZIP_FORMATS)
|
||||
if ext not in valid_exts:
|
||||
raise ValueError((
|
||||
f"Invalid file extension \"{ext}\"."
|
||||
f" Expected {', '.join(valid_exts)}"
|
||||
))
|
||||
|
||||
if path:
|
||||
filepath = os.path.join(destination_dir, filename)
|
||||
return ayon_api.download_file(
|
||||
path,
|
||||
filepath,
|
||||
chunk_size=cls.CHUNK_SIZE,
|
||||
progress=transfer_progress
|
||||
)
|
||||
|
||||
# dst_filepath = os.path.join(destination_dir, filename)
|
||||
if data["type"] == "dependency_package":
|
||||
return ayon_api.download_dependency_package(
|
||||
data["name"],
|
||||
destination_dir,
|
||||
filename,
|
||||
platform_name=data["platform"],
|
||||
chunk_size=cls.CHUNK_SIZE,
|
||||
progress=transfer_progress
|
||||
)
|
||||
|
||||
if data["type"] == "addon":
|
||||
return ayon_api.download_addon_private_file(
|
||||
data["name"],
|
||||
data["version"],
|
||||
filename,
|
||||
destination_dir,
|
||||
chunk_size=cls.CHUNK_SIZE,
|
||||
progress=transfer_progress
|
||||
)
|
||||
|
||||
raise ValueError(f"Unknown type to download \"{data['type']}\"")
|
||||
|
||||
@classmethod
|
||||
def cleanup(cls, source, destination_dir, data):
|
||||
filename = source["filename"]
|
||||
filepath = os.path.join(destination_dir, filename)
|
||||
if os.path.exists(filepath) and os.path.isfile(filepath):
|
||||
os.remove(filepath)
|
||||
|
||||
|
||||
class DownloadFactory:
|
||||
"""Factory for downloaders."""
|
||||
|
||||
def __init__(self):
|
||||
self._downloaders = {}
|
||||
|
||||
def register_format(self, downloader_type, downloader):
|
||||
"""Register downloader for download type.
|
||||
|
||||
Args:
|
||||
downloader_type (UrlType): Type of source.
|
||||
downloader (SourceDownloader): Downloader which takes care of
|
||||
download, hash check and unzipping.
|
||||
"""
|
||||
|
||||
self._downloaders[downloader_type.value] = downloader
|
||||
|
||||
def get_downloader(self, downloader_type):
|
||||
"""Registered downloader for type.
|
||||
|
||||
Args:
|
||||
downloader_type (UrlType): Type of source.
|
||||
|
||||
Returns:
|
||||
SourceDownloader: Downloader object which should care about file
|
||||
distribution.
|
||||
|
||||
Raises:
|
||||
ValueError: If type does not have registered downloader.
|
||||
"""
|
||||
|
||||
if downloader := self._downloaders.get(downloader_type):
|
||||
return downloader()
|
||||
raise ValueError(f"{downloader_type} not implemented")
|
||||
|
||||
|
||||
def get_default_download_factory():
|
||||
download_factory = DownloadFactory()
|
||||
download_factory.register_format(UrlType.FILESYSTEM, OSDownloader)
|
||||
download_factory.register_format(UrlType.HTTP, HTTPDownloader)
|
||||
download_factory.register_format(UrlType.SERVER, AyonServerDownloader)
|
||||
return download_factory
|
||||
|
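# Editorial sketch (not part of the original module): hedged usage of the
# factory above. The URL, paths and hash are made-up values and transfer
# progress is skipped (passed as None).
def _example_download_addon():
    factory = get_default_download_factory()
    downloader = factory.get_downloader(UrlType.HTTP.value)

    source = {"url": "https://example.com/addons/slack_1.0.0.zip"}
    zip_path = downloader.download(
        source, "/tmp/ayon_addons", {"type": "addon"}, None
    )
    downloader.check_hash(zip_path, "<expected sha256 hash>")
    downloader.unzip(zip_path, "/tmp/ayon_addons/slack_1.0.0")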
|
@ -1,248 +0,0 @@
|
|||
import os
|
||||
import sys
|
||||
import copy
|
||||
import tempfile
|
||||
|
||||
|
||||
import attr
|
||||
import pytest
|
||||
|
||||
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
root_dir = os.path.abspath(os.path.join(current_dir, "..", "..", "..", ".."))
|
||||
sys.path.append(root_dir)
|
||||
|
||||
from common.ayon_common.distribution.downloaders import (
|
||||
DownloadFactory,
|
||||
OSDownloader,
|
||||
HTTPDownloader,
|
||||
)
|
||||
from common.ayon_common.distribution.control import (
|
||||
AyonDistribution,
|
||||
UpdateState,
|
||||
)
|
||||
from common.ayon_common.distribution.data_structures import (
|
||||
AddonInfo,
|
||||
UrlType,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def download_factory():
|
||||
addon_downloader = DownloadFactory()
|
||||
addon_downloader.register_format(UrlType.FILESYSTEM, OSDownloader)
|
||||
addon_downloader.register_format(UrlType.HTTP, HTTPDownloader)
|
||||
|
||||
yield addon_downloader
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def http_downloader(download_factory):
|
||||
yield download_factory.get_downloader(UrlType.HTTP.value)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def temp_folder():
|
||||
yield tempfile.mkdtemp(prefix="ayon_test_")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_bundles():
|
||||
yield {
|
||||
"bundles": [
|
||||
{
|
||||
"name": "TestBundle",
|
||||
"createdAt": "2023-06-29T00:00:00.0+00:00",
|
||||
"installerVersion": None,
|
||||
"addons": {
|
||||
"slack": "1.0.0"
|
||||
},
|
||||
"dependencyPackages": {},
|
||||
"isProduction": True,
|
||||
"isStaging": False
|
||||
}
|
||||
],
|
||||
"productionBundle": "TestBundle",
|
||||
"stagingBundle": None
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_addon_info():
|
||||
yield {
|
||||
"name": "slack",
|
||||
"title": "Slack addon",
|
||||
"versions": {
|
||||
"1.0.0": {
|
||||
"hasSettings": True,
|
||||
"hasSiteSettings": False,
|
||||
"clientPyproject": {
|
||||
"tool": {
|
||||
"poetry": {
|
||||
"dependencies": {
|
||||
"nxtools": "^1.6",
|
||||
"orjson": "^3.6.7",
|
||||
"typer": "^0.4.1",
|
||||
"email-validator": "^1.1.3",
|
||||
"python": "^3.10",
|
||||
"fastapi": "^0.73.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"clientSourceInfo": [
|
||||
{
|
||||
"type": "http",
|
||||
"path": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing", # noqa
|
||||
"filename": "dummy.zip"
|
||||
},
|
||||
{
|
||||
"type": "filesystem",
|
||||
"path": {
|
||||
"windows": "P:/sources/some_file.zip",
|
||||
"linux": "/mnt/srv/sources/some_file.zip",
|
||||
"darwin": "/Volumes/srv/sources/some_file.zip"
|
||||
}
|
||||
}
|
||||
],
|
||||
"frontendScopes": {
|
||||
"project": {
|
||||
"sidebar": "hierarchy",
|
||||
}
|
||||
},
|
||||
"hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658" # noqa
|
||||
}
|
||||
},
|
||||
"description": ""
|
||||
}
|
||||
|
||||
|
||||
def test_register(printer):
|
||||
download_factory = DownloadFactory()
|
||||
|
||||
assert len(download_factory._downloaders) == 0, "Contains registered"
|
||||
|
||||
download_factory.register_format(UrlType.FILESYSTEM, OSDownloader)
|
||||
assert len(download_factory._downloaders) == 1, "Should contain one"
|
||||
|
||||
|
||||
def test_get_downloader(printer, download_factory):
|
||||
assert download_factory.get_downloader(UrlType.FILESYSTEM.value), "Should find" # noqa
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
download_factory.get_downloader("unknown"), "Shouldn't find"
|
||||
|
||||
|
||||
def test_addon_info(printer, sample_addon_info):
|
||||
"""Tests parsing of expected payload from v4 server into AadonInfo."""
|
||||
valid_minimum = {
|
||||
"name": "slack",
|
||||
"versions": {
|
||||
"1.0.0": {
|
||||
"clientSourceInfo": [
|
||||
{
|
||||
"type": "filesystem",
|
||||
"path": {
|
||||
"windows": "P:/sources/some_file.zip",
|
||||
"linux": "/mnt/srv/sources/some_file.zip",
|
||||
"darwin": "/Volumes/srv/sources/some_file.zip"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert AddonInfo.from_dict(valid_minimum), "Missing required fields"
|
||||
|
||||
addon = AddonInfo.from_dict(sample_addon_info)
|
||||
assert addon, "Should be created"
|
||||
assert addon.name == "slack", "Incorrect name"
|
||||
assert "1.0.0" in addon.versions, "Version is not in versions"
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
assert addon["name"], "Dict approach not implemented"
|
||||
|
||||
addon_as_dict = attr.asdict(addon)
|
||||
assert addon_as_dict["name"], "Dict approach should work"
|
||||
|
||||
|
||||
def _get_dist_item(dist_items, name, version):
|
||||
final_dist_info = next(
|
||||
(
|
||||
dist_info
|
||||
for dist_info in dist_items
|
||||
if (
|
||||
dist_info["addon_name"] == name
|
||||
and dist_info["addon_version"] == version
|
||||
)
|
||||
),
|
||||
{}
|
||||
)
|
||||
return final_dist_info["dist_item"]
|
||||
|
||||
|
||||
def test_update_addon_state(
|
||||
printer, sample_addon_info, temp_folder, download_factory, sample_bundles
|
||||
):
|
||||
"""Tests possible cases of addon update."""
|
||||
|
||||
addon_version = list(sample_addon_info["versions"])[0]
|
||||
broken_addon_info = copy.deepcopy(sample_addon_info)
|
||||
|
||||
# Cause crash because of invalid hash
|
||||
broken_addon_info["versions"][addon_version]["hash"] = "brokenhash"
|
||||
distribution = AyonDistribution(
|
||||
addon_dirpath=temp_folder,
|
||||
dependency_dirpath=temp_folder,
|
||||
dist_factory=download_factory,
|
||||
addons_info=[broken_addon_info],
|
||||
dependency_packages_info=[],
|
||||
bundles_info=sample_bundles
|
||||
)
|
||||
distribution.distribute()
|
||||
dist_items = distribution.get_addon_dist_items()
|
||||
slack_dist_item = _get_dist_item(
|
||||
dist_items,
|
||||
sample_addon_info["name"],
|
||||
addon_version
|
||||
)
|
||||
slack_state = slack_dist_item.state
|
||||
assert slack_state == UpdateState.UPDATE_FAILED, (
|
||||
"Update should have failed because of wrong hash")
|
||||
|
||||
# Fix cache and validate if was updated
|
||||
distribution = AyonDistribution(
|
||||
addon_dirpath=temp_folder,
|
||||
dependency_dirpath=temp_folder,
|
||||
dist_factory=download_factory,
|
||||
addons_info=[sample_addon_info],
|
||||
dependency_packages_info=[],
|
||||
bundles_info=sample_bundles
|
||||
)
|
||||
distribution.distribute()
|
||||
dist_items = distribution.get_addon_dist_items()
|
||||
slack_dist_item = _get_dist_item(
|
||||
dist_items,
|
||||
sample_addon_info["name"],
|
||||
addon_version
|
||||
)
|
||||
assert slack_dist_item.state == UpdateState.UPDATED, (
|
||||
"Addon should have been updated")
|
||||
|
||||
# Is UPDATED without calling distribute
|
||||
distribution = AyonDistribution(
|
||||
addon_dirpath=temp_folder,
|
||||
dependency_dirpath=temp_folder,
|
||||
dist_factory=download_factory,
|
||||
addons_info=[sample_addon_info],
|
||||
dependency_packages_info=[],
|
||||
bundles_info=sample_bundles
|
||||
)
|
||||
dist_items = distribution.get_addon_dist_items()
|
||||
slack_dist_item = _get_dist_item(
|
||||
dist_items,
|
||||
sample_addon_info["name"],
|
||||
addon_version
|
||||
)
|
||||
assert slack_dist_item.state == UpdateState.UPDATED, (
|
||||
"Addon should already exist")
|
||||
|
|
@ -1,146 +0,0 @@
|
|||
import sys
|
||||
|
||||
from qtpy import QtWidgets, QtGui
|
||||
|
||||
from ayon_common import is_staging_enabled
|
||||
from ayon_common.resources import (
|
||||
get_icon_path,
|
||||
load_stylesheet,
|
||||
)
|
||||
from ayon_common.ui_utils import get_qt_app
|
||||
|
||||
|
||||
class MissingBundleWindow(QtWidgets.QDialog):
|
||||
default_width = 410
|
||||
default_height = 170
|
||||
|
||||
def __init__(
|
||||
self, url=None, bundle_name=None, use_staging=None, parent=None
|
||||
):
|
||||
super().__init__(parent)
|
||||
|
||||
icon_path = get_icon_path()
|
||||
icon = QtGui.QIcon(icon_path)
|
||||
self.setWindowIcon(icon)
|
||||
self.setWindowTitle("Missing Bundle")
|
||||
|
||||
self._url = url
|
||||
self._bundle_name = bundle_name
|
||||
self._use_staging = use_staging
|
||||
self._first_show = True
|
||||
|
||||
info_label = QtWidgets.QLabel("", self)
|
||||
info_label.setWordWrap(True)
|
||||
|
||||
btns_widget = QtWidgets.QWidget(self)
|
||||
confirm_btn = QtWidgets.QPushButton("Exit", btns_widget)
|
||||
|
||||
btns_layout = QtWidgets.QHBoxLayout(btns_widget)
|
||||
btns_layout.setContentsMargins(0, 0, 0, 0)
|
||||
btns_layout.addStretch(1)
|
||||
btns_layout.addWidget(confirm_btn, 0)
|
||||
|
||||
main_layout = QtWidgets.QVBoxLayout(self)
|
||||
main_layout.addWidget(info_label, 0)
|
||||
main_layout.addStretch(1)
|
||||
main_layout.addWidget(btns_widget, 0)
|
||||
|
||||
confirm_btn.clicked.connect(self._on_confirm_click)
|
||||
|
||||
self._info_label = info_label
|
||||
self._confirm_btn = confirm_btn
|
||||
|
||||
self._update_label()
|
||||
|
||||
def set_url(self, url):
|
||||
if url == self._url:
|
||||
return
|
||||
self._url = url
|
||||
self._update_label()
|
||||
|
||||
def set_bundle_name(self, bundle_name):
|
||||
if bundle_name == self._bundle_name:
|
||||
return
|
||||
self._bundle_name = bundle_name
|
||||
self._update_label()
|
||||
|
||||
def set_use_staging(self, use_staging):
|
||||
if self._use_staging == use_staging:
|
||||
return
|
||||
self._use_staging = use_staging
|
||||
self._update_label()
|
||||
|
||||
def showEvent(self, event):
|
||||
super().showEvent(event)
|
||||
if self._first_show:
|
||||
self._first_show = False
|
||||
self._on_first_show()
|
||||
self._recalculate_sizes()
|
||||
|
||||
def resizeEvent(self, event):
|
||||
super().resizeEvent(event)
|
||||
self._recalculate_sizes()
|
||||
|
||||
def _recalculate_sizes(self):
|
||||
hint = self._confirm_btn.sizeHint()
|
||||
new_width = max((hint.width(), hint.height() * 3))
|
||||
self._confirm_btn.setMinimumWidth(new_width)
|
||||
|
||||
def _on_first_show(self):
|
||||
self.setStyleSheet(load_stylesheet())
|
||||
self.resize(self.default_width, self.default_height)
|
||||
|
||||
def _on_confirm_click(self):
|
||||
self.accept()
|
||||
self.close()
|
||||
|
||||
def _update_label(self):
|
||||
self._info_label.setText(self._get_label())
|
||||
|
||||
def _get_label(self):
|
||||
url_part = f" <b>{self._url}</b>" if self._url else ""
|
||||
|
||||
if self._bundle_name:
|
||||
return (
|
||||
f"Requested release bundle <b>{self._bundle_name}</b>"
|
||||
f" is not available on server{url_part}."
|
||||
"<br/><br/>Try to restart AYON desktop launcher. Please"
|
||||
" contact your administrator if issue persist."
|
||||
)
|
||||
mode = "staging" if self._use_staging else "production"
|
||||
return (
|
||||
f"No release bundle is set as {mode} on the AYON"
|
||||
f" server{url_part} so there is nothing to launch."
|
||||
"<br/><br/>Please contact your administrator"
|
||||
" to resolve the issue."
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
"""Show message that server does not have set bundle to use.
|
||||
|
||||
It is possible to pass url as argument to show it in the message. To use
|
||||
this feature, pass `--url <url>` as argument to this script.
|
||||
"""
|
||||
|
||||
url = None
|
||||
bundle_name = None
|
||||
if "--url" in sys.argv:
|
||||
url_index = sys.argv.index("--url") + 1
|
||||
if url_index < len(sys.argv):
|
||||
url = sys.argv[url_index]
|
||||
|
||||
if "--bundle" in sys.argv:
|
||||
bundle_index = sys.argv.index("--bundle") + 1
|
||||
if bundle_index < len(sys.argv):
|
||||
bundle_name = sys.argv[bundle_index]
|
||||
|
||||
use_staging = is_staging_enabled()
|
||||
app = get_qt_app()
|
||||
window = MissingBundleWindow(url, bundle_name, use_staging)
|
||||
window.show()
|
||||
app.exec_()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
@ -1,90 +0,0 @@
|
|||
import os
|
||||
import subprocess
|
||||
|
||||
from ayon_common.utils import get_ayon_appdirs, get_ayon_launch_args
|
||||
|
||||
|
||||
def get_local_dir(*subdirs):
|
||||
"""Get product directory in user's home directory.
|
||||
|
||||
Each user on a machine has their own local directory where updates,
addons etc. are downloaded.
|
||||
|
||||
Returns:
|
||||
str: Path to product local directory.
|
||||
"""
|
||||
|
||||
if not subdirs:
|
||||
raise ValueError("Must fill dir_name if nothing else provided!")
|
||||
|
||||
local_dir = get_ayon_appdirs(*subdirs)
|
||||
if not os.path.isdir(local_dir):
|
||||
try:
|
||||
os.makedirs(local_dir)
|
||||
except Exception: # TODO fix exception
|
||||
raise RuntimeError(f"Cannot create {local_dir}")
|
||||
|
||||
return local_dir
|
||||
|
||||
|
||||
def get_addons_dir():
|
||||
"""Directory where addon packages are stored.
|
||||
|
||||
The path to addons is determined using the python module 'appdirs'.

The path is stored in the environment variable 'AYON_ADDONS_DIR'.
The value of the environment variable can be overridden, but we highly
recommend using that option only for development purposes.
|
||||
|
||||
Returns:
|
||||
str: Path to directory where addons should be downloaded.
|
||||
"""
|
||||
|
||||
addons_dir = os.environ.get("AYON_ADDONS_DIR")
|
||||
if not addons_dir:
|
||||
addons_dir = get_local_dir("addons")
|
||||
os.environ["AYON_ADDONS_DIR"] = addons_dir
|
||||
return addons_dir
|
||||
|
||||
|
||||
def get_dependencies_dir():
|
||||
"""Directory where dependency packages are stored.
|
||||
|
||||
The path to dependency packages is determined using the python module 'appdirs'.

The path is stored in the environment variable 'AYON_DEPENDENCIES_DIR'.
The value of the environment variable can be overridden, but we highly
recommend using that option only for development purposes.
|
||||
|
||||
Returns:
|
||||
str: Path to directory where dependency packages should be downloaded.
|
||||
"""
|
||||
|
||||
dependencies_dir = os.environ.get("AYON_DEPENDENCIES_DIR")
|
||||
if not dependencies_dir:
|
||||
dependencies_dir = get_local_dir("dependency_packages")
|
||||
os.environ["AYON_DEPENDENCIES_DIR"] = dependencies_dir
|
||||
return dependencies_dir
|
||||
|
||||
|
||||
def show_missing_bundle_information(url, bundle_name=None):
|
||||
"""Show missing bundle information window.
|
||||
|
||||
This function should be called when server does not have set bundle for
|
||||
production or staging, or when bundle that should be used is not available
|
||||
on server.
|
||||
|
||||
A subprocess is used to show the dialog. The call is blocking and waits until
the dialog is closed.
|
||||
|
||||
Args:
|
||||
url (str): Server url where bundle is not set.
|
||||
bundle_name (Optional[str]): Name of bundle that was not found.
|
||||
"""
|
||||
|
||||
ui_dir = os.path.join(os.path.dirname(__file__), "ui")
|
||||
script_path = os.path.join(ui_dir, "missing_bundle_window.py")
|
||||
args = get_ayon_launch_args(script_path, "--skip-bootstrap", "--url", url)
|
||||
if bundle_name:
|
||||
args.extend(["--bundle", bundle_name])
|
||||
subprocess.call(args)
|
||||
Binary file not shown.
Binary file not shown.
|
Before Width: | Height: | Size: 4.2 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 16 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 15 KiB |
|
|
@ -1,25 +0,0 @@
|
|||
import os
|
||||
|
||||
from ayon_common.utils import is_staging_enabled
|
||||
|
||||
RESOURCES_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
def get_resource_path(*args):
|
||||
path_items = list(args)
|
||||
path_items.insert(0, RESOURCES_DIR)
|
||||
return os.path.sep.join(path_items)
|
||||
|
||||
|
||||
def get_icon_path():
|
||||
if is_staging_enabled():
|
||||
return get_resource_path("AYON_staging.png")
|
||||
return get_resource_path("AYON.png")
|
||||
|
||||
|
||||
def load_stylesheet():
|
||||
stylesheet_path = get_resource_path("stylesheet.css")
|
||||
|
||||
with open(stylesheet_path, "r") as stream:
|
||||
content = stream.read()
|
||||
return content
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 8.9 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 2.1 KiB |
|
|
@ -1,84 +0,0 @@
|
|||
* {
|
||||
font-size: 10pt;
|
||||
font-family: "Noto Sans";
|
||||
font-weight: 450;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
QWidget {
|
||||
color: #D3D8DE;
|
||||
background: #2C313A;
|
||||
border-radius: 0px;
|
||||
}
|
||||
|
||||
QWidget:disabled {
|
||||
color: #5b6779;
|
||||
}
|
||||
|
||||
QLabel {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
QPushButton {
|
||||
text-align:center center;
|
||||
border: 0px solid transparent;
|
||||
border-radius: 0.2em;
|
||||
padding: 3px 5px 3px 5px;
|
||||
background: #434a56;
|
||||
}
|
||||
|
||||
QPushButton:hover {
|
||||
background: rgba(168, 175, 189, 0.3);
|
||||
color: #F0F2F5;
|
||||
}
|
||||
|
||||
QPushButton:pressed {}
|
||||
|
||||
QPushButton:disabled {
|
||||
background: #434a56;
|
||||
}
|
||||
|
||||
QLineEdit {
|
||||
border: 1px solid #373D48;
|
||||
border-radius: 0.3em;
|
||||
background: #21252B;
|
||||
padding: 0.1em;
|
||||
}
|
||||
|
||||
QLineEdit:disabled {
|
||||
background: #2C313A;
|
||||
}
|
||||
QLineEdit:hover {
|
||||
border-color: rgba(168, 175, 189, .3);
|
||||
}
|
||||
QLineEdit:focus {
|
||||
border-color: rgb(92, 173, 214);
|
||||
}
|
||||
|
||||
QLineEdit[state="invalid"] {
|
||||
border-color: #AA5050;
|
||||
}
|
||||
|
||||
#Separator {
|
||||
background: rgba(75, 83, 98, 127);
|
||||
}
|
||||
|
||||
#PasswordBtn {
|
||||
border: none;
|
||||
padding: 0.1em;
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
#PasswordBtn:hover {
|
||||
background: #434a56;
|
||||
}
|
||||
|
||||
#LikeDisabledInput {
|
||||
background: #2C313A;
|
||||
}
|
||||
#LikeDisabledInput:hover {
|
||||
border-color: #373D48;
|
||||
}
|
||||
#LikeDisabledInput:focus {
|
||||
border-color: #373D48;
|
||||
}
|
||||
|
|
@ -1,36 +0,0 @@
|
|||
import sys
|
||||
from qtpy import QtWidgets, QtCore
|
||||
|
||||
|
||||
def set_style_property(widget, property_name, property_value):
|
||||
"""Set widget's property that may affect style.
|
||||
|
||||
Style of widget is polished if current property value is different.
|
||||
"""
|
||||
|
||||
cur_value = widget.property(property_name)
|
||||
if cur_value == property_value:
|
||||
return
|
||||
widget.setProperty(property_name, property_value)
|
||||
widget.style().polish(widget)
|
||||
|
||||
|
||||
def get_qt_app():
|
||||
app = QtWidgets.QApplication.instance()
|
||||
if app is not None:
|
||||
return app
|
||||
|
||||
for attr_name in (
|
||||
"AA_EnableHighDpiScaling",
|
||||
"AA_UseHighDpiPixmaps",
|
||||
):
|
||||
attr = getattr(QtCore.Qt, attr_name, None)
|
||||
if attr is not None:
|
||||
QtWidgets.QApplication.setAttribute(attr)
|
||||
|
||||
if hasattr(QtWidgets.QApplication, "setHighDpiScaleFactorRoundingPolicy"):
|
||||
QtWidgets.QApplication.setHighDpiScaleFactorRoundingPolicy(
|
||||
QtCore.Qt.HighDpiScaleFactorRoundingPolicy.PassThrough
|
||||
)
|
||||
|
||||
return QtWidgets.QApplication(sys.argv)
|
||||
|
|
@ -1,90 +0,0 @@
|
|||
import os
|
||||
import sys
|
||||
import appdirs
|
||||
|
||||
IS_BUILT_APPLICATION = getattr(sys, "frozen", False)
|
||||
|
||||
|
||||
def get_ayon_appdirs(*args):
|
||||
"""Local app data directory of AYON client.
|
||||
|
||||
Args:
|
||||
*args (Iterable[str]): Subdirectories/files in local app data dir.
|
||||
|
||||
Returns:
|
||||
str: Path to directory/file in local app data dir.
|
||||
"""
|
||||
|
||||
return os.path.join(
|
||||
appdirs.user_data_dir("AYON", "Ynput"),
|
||||
*args
|
||||
)
|
||||
|
||||
|
||||
def is_staging_enabled():
|
||||
"""Check if staging is enabled.
|
||||
|
||||
Returns:
|
||||
bool: True if staging is enabled.
|
||||
"""
|
||||
|
||||
return os.getenv("AYON_USE_STAGING") == "1"
|
||||
|
||||
|
||||
def _create_local_site_id():
|
||||
"""Create a local site identifier.
|
||||
|
||||
Returns:
|
||||
str: Randomly generated site id.
|
||||
"""
|
||||
|
||||
from coolname import generate_slug
|
||||
|
||||
new_id = generate_slug(3)
|
||||
|
||||
print("Created local site id \"{}\"".format(new_id))
|
||||
|
||||
return new_id
|
||||
|
||||
|
||||
def get_local_site_id():
|
||||
"""Get local site identifier.
|
||||
|
||||
Site id is created if it does not exist yet.
|
||||
|
||||
Returns:
|
||||
str: Site id.
|
||||
"""
|
||||
|
||||
# used for background syncing
|
||||
site_id = os.environ.get("AYON_SITE_ID")
|
||||
if site_id:
|
||||
return site_id
|
||||
|
||||
site_id_path = get_ayon_appdirs("site_id")
|
||||
if os.path.exists(site_id_path):
|
||||
with open(site_id_path, "r") as stream:
|
||||
site_id = stream.read()
|
||||
|
||||
if not site_id:
|
||||
site_id = _create_local_site_id()
|
||||
with open(site_id_path, "w") as stream:
|
||||
stream.write(site_id)
|
||||
return site_id
|
||||
|
||||
|
||||
def get_ayon_launch_args(*args):
|
||||
"""Launch arguments that can be used to launch ayon process.
|
||||
|
||||
Args:
|
||||
*args (str): Additional arguments.
|
||||
|
||||
Returns:
|
||||
list[str]: Launch arguments.
|
||||
"""
|
||||
|
||||
output = [sys.executable]
|
||||
if not IS_BUILT_APPLICATION:
|
||||
output.append(sys.argv[0])
|
||||
output.extend(args)
|
||||
return output
|
||||
|
|
@ -5,6 +5,7 @@ import sys
|
|||
import code
|
||||
import click
|
||||
|
||||
from openpype import AYON_SERVER_ENABLED
|
||||
from .pype_commands import PypeCommands
|
||||
|
||||
|
||||
|
|
@ -46,7 +47,11 @@ def main(ctx):
|
|||
|
||||
if ctx.invoked_subcommand is None:
|
||||
# Print help if headless mode is used
|
||||
if os.environ.get("OPENPYPE_HEADLESS_MODE") == "1":
|
||||
if AYON_SERVER_ENABLED:
|
||||
is_headless = os.getenv("AYON_HEADLESS_MODE") == "1"
|
||||
else:
|
||||
is_headless = os.getenv("OPENPYPE_HEADLESS_MODE") == "1"
|
||||
if is_headless:
|
||||
print(ctx.get_help())
|
||||
sys.exit(0)
|
||||
else:
|
||||
|
|
@ -57,6 +62,9 @@ def main(ctx):
|
|||
@click.option("-d", "--dev", is_flag=True, help="Settings in Dev mode")
|
||||
def settings(dev):
|
||||
"""Show Pype Settings UI."""
|
||||
|
||||
if AYON_SERVER_ENABLED:
|
||||
raise RuntimeError("AYON does not support 'settings' command.")
|
||||
PypeCommands().launch_settings_gui(dev)
|
||||
|
||||
|
||||
|
|
@ -110,6 +118,8 @@ def eventserver(ftrack_url,
|
|||
on linux and window service).
|
||||
"""
|
||||
|
||||
if AYON_SERVER_ENABLED:
|
||||
raise RuntimeError("AYON does not support 'eventserver' command.")
|
||||
PypeCommands().launch_eventservercli(
|
||||
ftrack_url,
|
||||
ftrack_user,
|
||||
|
|
@ -134,6 +144,10 @@ def webpublisherwebserver(executable, upload_dir, host=None, port=None):
|
|||
Expect "pype.club" user created on Ftrack.
|
||||
"""
|
||||
|
||||
if AYON_SERVER_ENABLED:
|
||||
raise RuntimeError(
|
||||
"AYON does not support 'webpublisherwebserver' command."
|
||||
)
|
||||
PypeCommands().launch_webpublisher_webservercli(
|
||||
upload_dir=upload_dir,
|
||||
executable=executable,
|
||||
|
|
@ -196,6 +210,10 @@ def remotepublishfromapp(project, path, host, user=None, targets=None):
|
|||
More than one path is allowed.
|
||||
"""
|
||||
|
||||
if AYON_SERVER_ENABLED:
|
||||
raise RuntimeError(
|
||||
"AYON does not support 'remotepublishfromapp' command."
|
||||
)
|
||||
PypeCommands.remotepublishfromapp(
|
||||
project, path, host, user, targets=targets
|
||||
)
|
||||
|
|
@ -214,11 +232,15 @@ def remotepublish(project, path, user=None, targets=None):
|
|||
More than one path is allowed.
|
||||
"""
|
||||
|
||||
if AYON_SERVER_ENABLED:
|
||||
raise RuntimeError("AYON does not support 'remotepublish' command.")
|
||||
PypeCommands.remotepublish(project, path, user, targets=targets)
|
||||
|
||||
|
||||
@main.command(context_settings={"ignore_unknown_options": True})
|
||||
def projectmanager():
|
||||
if AYON_SERVER_ENABLED:
|
||||
raise RuntimeError("AYON does not support 'projectmanager' command.")
|
||||
PypeCommands().launch_project_manager()
|
||||
|
||||
|
||||
|
|
@ -316,12 +338,18 @@ def runtests(folder, mark, pyargs, test_data_folder, persist, app_variant,
|
|||
persist, app_variant, timeout, setup_only)
|
||||
|
||||
|
||||
@main.command()
|
||||
@main.command(help="DEPRECATED - run sync server")
|
||||
@click.pass_context
|
||||
@click.option("-a", "--active_site", required=True,
|
||||
help="Name of active stie")
|
||||
def syncserver(active_site):
|
||||
help="Name of active site")
|
||||
def syncserver(ctx, active_site):
|
||||
"""Run sync site server in background.
|
||||
|
||||
Deprecated:
|
||||
This command is deprecated and will be removed in future versions.
|
||||
Use '~/openpype_console module sync_server syncservice' instead.
|
||||
|
||||
Details:
|
||||
Some Site Sync use cases need to expose site to another one.
|
||||
For example if majority of artists work in studio, they are not using
|
||||
SS at all, but if you want to expose published assets to 'studio' site
|
||||
|
|
@ -335,7 +363,12 @@ def syncserver(active_site):
|
|||
var OPENPYPE_LOCAL_ID set to 'active_site'.
|
||||
"""
|
||||
|
||||
PypeCommands().syncserver(active_site)
|
||||
if AYON_SERVER_ENABLED:
|
||||
raise RuntimeError("AYON does not support 'syncserver' command.")
|
||||
|
||||
from openpype.modules.sync_server.sync_server_module import (
|
||||
syncservice)
|
||||
ctx.invoke(syncservice, active_site=active_site)
|
||||
|
||||
|
||||
@main.command()
|
||||
|
|
@ -347,6 +380,8 @@ def repack_version(directory):
|
|||
recalculating file checksums. It will try to use version detected in
|
||||
directory name.
|
||||
"""
|
||||
if AYON_SERVER_ENABLED:
|
||||
raise RuntimeError("AYON does not support 'repack-version' command.")
|
||||
PypeCommands().repack_version(directory)
|
||||
|
||||
|
||||
|
|
@ -358,6 +393,9 @@ def repack_version(directory):
|
|||
"--dbonly", help="Store only Database data", default=False, is_flag=True)
|
||||
def pack_project(project, dirpath, dbonly):
|
||||
"""Create a package of project with all files and database dump."""
|
||||
|
||||
if AYON_SERVER_ENABLED:
|
||||
raise RuntimeError("AYON does not support 'pack-project' command.")
|
||||
PypeCommands().pack_project(project, dirpath, dbonly)
|
||||
|
||||
|
||||
|
|
@ -370,6 +408,8 @@ def pack_project(project, dirpath, dbonly):
|
|||
"--dbonly", help="Store only Database data", default=False, is_flag=True)
|
||||
def unpack_project(zipfile, root, dbonly):
|
||||
"""Create a package of project with all files and database dump."""
|
||||
if AYON_SERVER_ENABLED:
|
||||
raise RuntimeError("AYON does not support 'unpack-project' command.")
|
||||
PypeCommands().unpack_project(zipfile, root, dbonly)
|
||||
|
||||
|
||||
|
|
@ -384,9 +424,17 @@ def interactive():
|
|||
Executable 'openpype_gui' on Windows won't work.
|
||||
"""
|
||||
|
||||
from openpype.version import __version__
|
||||
if AYON_SERVER_ENABLED:
|
||||
version = os.environ["AYON_VERSION"]
|
||||
banner = (
|
||||
f"AYON launcher {version}\nPython {sys.version} on {sys.platform}"
|
||||
)
|
||||
else:
|
||||
from openpype.version import __version__
|
||||
|
||||
banner = f"OpenPype {__version__}\nPython {sys.version} on {sys.platform}"
|
||||
banner = (
|
||||
f"OpenPype {__version__}\nPython {sys.version} on {sys.platform}"
|
||||
)
|
||||
code.interact(banner)
|
||||
|
||||
|
||||
|
|
@ -395,11 +443,13 @@ def interactive():
|
|||
is_flag=True, default=False)
|
||||
def version(build):
|
||||
"""Print OpenPype version."""
|
||||
if AYON_SERVER_ENABLED:
|
||||
print(os.environ["AYON_VERSION"])
|
||||
return
|
||||
|
||||
from openpype.version import __version__
|
||||
from igniter.bootstrap_repos import BootstrapRepos, OpenPypeVersion
|
||||
from pathlib import Path
|
||||
import os
|
||||
|
||||
if getattr(sys, 'frozen', False):
|
||||
local_version = BootstrapRepos.get_version(
|
||||
|
|
|
|||
|
|
@ -212,16 +212,12 @@ def _process_referenced_pipeline_result(result, link_type):
|
|||
continue
|
||||
|
||||
for output in sorted(outputs_recursive, key=lambda o: o["depth"]):
|
||||
output_links = output.get("data", {}).get("inputLinks")
|
||||
if not output_links and output["type"] != "hero_version":
|
||||
continue
|
||||
|
||||
# Leaf
|
||||
if output["_id"] not in correctly_linked_ids:
|
||||
continue
|
||||
|
||||
_filter_input_links(
|
||||
output_links,
|
||||
output.get("data", {}).get("inputLinks"),
|
||||
link_type,
|
||||
correctly_linked_ids
|
||||
)
|
||||
|
|
|
|||
|
|
@ -133,7 +133,6 @@ def _get_default_template_name(templates):
|
|||
def _template_replacements_to_v3(template):
|
||||
return (
|
||||
template
|
||||
.replace("{folder[name]}", "{asset}")
|
||||
.replace("{product[name]}", "{subset}")
|
||||
.replace("{product[type]}", "{family}")
|
||||
)
|
||||
|
|
@ -715,7 +714,6 @@ def convert_v4_representation_to_v3(representation):
|
|||
if "template" in output_data:
|
||||
output_data["template"] = (
|
||||
output_data["template"]
|
||||
.replace("{folder[name]}", "{asset}")
|
||||
.replace("{product[name]}", "{subset}")
|
||||
.replace("{product[type]}", "{family}")
|
||||
)
|
||||
|
|
@ -977,7 +975,6 @@ def convert_create_representation_to_v4(representation, con):
|
|||
representation_data = representation["data"]
|
||||
representation_data["template"] = (
|
||||
representation_data["template"]
|
||||
.replace("{asset}", "{folder[name]}")
|
||||
.replace("{subset}", "{product[name]}")
|
||||
.replace("{family}", "{product[type]}")
|
||||
)
|
||||
|
|
@ -1266,7 +1263,6 @@ def convert_update_representation_to_v4(
|
|||
if "template" in attribs:
|
||||
attribs["template"] = (
|
||||
attribs["template"]
|
||||
.replace("{asset}", "{folder[name]}")
|
||||
.replace("{family}", "{product[type]}")
|
||||
.replace("{subset}", "{product[name]}")
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import os
|
||||
|
||||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class AddLastWorkfileToLaunchArgs(PreLaunchHook):
|
||||
|
|
@ -13,8 +13,8 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
|
|||
|
||||
# Execute after workfile template copy
|
||||
order = 10
|
||||
app_groups = [
|
||||
"3dsmax",
|
||||
app_groups = {
|
||||
"3dsmax", "adsk_3dsmax",
|
||||
"maya",
|
||||
"nuke",
|
||||
"nukex",
|
||||
|
|
@ -26,8 +26,9 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
|
|||
"photoshop",
|
||||
"tvpaint",
|
||||
"substancepainter",
|
||||
"aftereffects"
|
||||
]
|
||||
"aftereffects",
|
||||
}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
if not self.data.get("start_last_workfile"):
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import os
|
||||
import shutil
|
||||
from openpype.lib import PreLaunchHook
|
||||
from openpype.settings import get_project_settings
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from openpype.pipeline.workfile import (
|
||||
get_custom_workfile_template,
|
||||
get_custom_workfile_template_by_string_context
|
||||
|
|
@ -19,7 +19,8 @@ class CopyTemplateWorkfile(PreLaunchHook):
|
|||
|
||||
# Before `AddLastWorkfileToLaunchArgs`
|
||||
order = 0
|
||||
app_groups = ["blender", "photoshop", "tvpaint", "aftereffects"]
|
||||
app_groups = {"blender", "photoshop", "tvpaint", "aftereffects"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
"""Check if can copy template for context and do it if possible.
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import os
|
||||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from openpype.pipeline.workfile import create_workdir_extra_folders
|
||||
|
||||
|
||||
|
|
@ -14,6 +14,7 @@ class CreateWorkdirExtraFolders(PreLaunchHook):
|
|||
|
||||
# Execute after workfile template copy
|
||||
order = 15
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
if not self.application.is_host:
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import subprocess
|
||||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class LaunchFoundryAppsWindows(PreLaunchHook):
|
||||
|
|
@ -13,8 +13,9 @@ class LaunchFoundryAppsWindows(PreLaunchHook):
|
|||
|
||||
# Should be as last hook because must change launch arguments to string
|
||||
order = 1000
|
||||
app_groups = ["nuke", "nukeassist", "nukex", "hiero", "nukestudio"]
|
||||
platforms = ["windows"]
|
||||
app_groups = {"nuke", "nukeassist", "nukex", "hiero", "nukestudio"}
|
||||
platforms = {"windows"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
# Change `creationflags` to CREATE_NEW_CONSOLE
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
from openpype.client import get_project, get_asset_by_name
|
||||
from openpype.lib import (
|
||||
from openpype.lib.applications import (
|
||||
PreLaunchHook,
|
||||
EnvironmentPrepData,
|
||||
prepare_app_environments,
|
||||
|
|
@ -10,6 +10,7 @@ from openpype.pipeline import Anatomy
|
|||
|
||||
class GlobalHostDataHook(PreLaunchHook):
|
||||
order = -100
|
||||
launch_types = set()
|
||||
|
||||
def execute(self):
|
||||
"""Prepare global objects to `data` that will be used for sure."""
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import os
|
||||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class LaunchWithTerminal(PreLaunchHook):
|
||||
|
|
@ -12,7 +12,8 @@ class LaunchWithTerminal(PreLaunchHook):
|
|||
"""
|
||||
order = 1000
|
||||
|
||||
platforms = ["darwin"]
|
||||
platforms = {"darwin"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
executable = str(self.launch_context.executable)
|
||||
|
|
|
|||
|
|
@ -1,10 +1,11 @@
|
|||
import os
|
||||
|
||||
from openpype.lib import (
|
||||
from openpype.lib import get_openpype_execute_args
|
||||
from openpype.lib.applications import (
|
||||
get_non_python_host_kwargs,
|
||||
PreLaunchHook,
|
||||
get_openpype_execute_args
|
||||
LaunchTypes,
|
||||
)
|
||||
from openpype.lib.applications import get_non_python_host_kwargs
|
||||
|
||||
from openpype import PACKAGE_DIR as OPENPYPE_DIR
|
||||
|
||||
|
|
@ -16,9 +17,10 @@ class NonPythonHostHook(PreLaunchHook):
|
|||
python script which launch the host. For these cases it is necessary to
|
||||
prepend python (or openpype) executable and script path before application's.
|
||||
"""
|
||||
app_groups = ["harmony", "photoshop", "aftereffects"]
|
||||
app_groups = {"harmony", "photoshop", "aftereffects"}
|
||||
|
||||
order = 20
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
# Pop executable
|
||||
|
|
@ -54,4 +56,3 @@ class NonPythonHostHook(PreLaunchHook):
|
|||
|
||||
self.launch_context.kwargs = \
|
||||
get_non_python_host_kwargs(self.launch_context.kwargs)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,8 +1,6 @@
|
|||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook
|
||||
|
||||
from openpype.pipeline.colorspace import (
|
||||
get_imageio_config
|
||||
)
|
||||
from openpype.pipeline.colorspace import get_imageio_config
|
||||
from openpype.pipeline.template_data import get_template_data_with_names
|
||||
|
||||
|
||||
|
|
@ -10,7 +8,7 @@ class OCIOEnvHook(PreLaunchHook):
|
|||
"""Set OCIO environment variable for hosts that use OpenColorIO."""
|
||||
|
||||
order = 0
|
||||
hosts = [
|
||||
hosts = {
|
||||
"substancepainter",
|
||||
"fusion",
|
||||
"blender",
|
||||
|
|
@ -20,8 +18,9 @@ class OCIOEnvHook(PreLaunchHook):
|
|||
"maya",
|
||||
"nuke",
|
||||
"hiero",
|
||||
"resolve"
|
||||
]
|
||||
"resolve",
|
||||
}
|
||||
launch_types = set()
|
||||
|
||||
def execute(self):
|
||||
"""Hook entry method."""
|
||||
|
|
@ -39,7 +38,8 @@ class OCIOEnvHook(PreLaunchHook):
|
|||
host_name=self.host_name,
|
||||
project_settings=self.data["project_settings"],
|
||||
anatomy_data=template_data,
|
||||
anatomy=self.data["anatomy"]
|
||||
anatomy=self.data["anatomy"],
|
||||
env=self.launch_context.env,
|
||||
)
|
||||
|
||||
if config_data:
|
||||
|
|
|
|||
|
|
@@ -32,19 +32,26 @@ class HostDirmap(object):
    """

    def __init__(
        self, host_name, project_name, project_settings=None, sync_module=None
        self,
        host_name,
        project_name,
        project_settings=None,
        sync_module=None
    ):
        self.host_name = host_name
        self.project_name = project_name
        self._project_settings = project_settings
        self._sync_module = sync_module  # to limit reinit of Modules
        self._sync_module = sync_module
        # to limit reinit of Modules
        self._sync_module_discovered = sync_module is not None
        self._log = None

    @property
    def sync_module(self):
        if self._sync_module is None:
        if not self._sync_module_discovered:
            self._sync_module_discovered = True
            manager = ModulesManager()
            self._sync_module = manager["sync_server"]
            self._sync_module = manager.get("sync_server")
        return self._sync_module

    @property

@@ -151,21 +158,25 @@ class HostDirmap(object):
        """
        project_name = self.project_name

        sync_module = self.sync_module
        mapping = {}
        if (not self.sync_module.enabled or
                project_name not in self.sync_module.get_enabled_projects()):
        if (
            sync_module is None
            or not sync_module.enabled
            or project_name not in sync_module.get_enabled_projects()
        ):
            return mapping

        active_site = self.sync_module.get_local_normalized_site(
            self.sync_module.get_active_site(project_name))
        remote_site = self.sync_module.get_local_normalized_site(
            self.sync_module.get_remote_site(project_name))
        active_site = sync_module.get_local_normalized_site(
            sync_module.get_active_site(project_name))
        remote_site = sync_module.get_local_normalized_site(
            sync_module.get_remote_site(project_name))
        self.log.debug(
            "active {} - remote {}".format(active_site, remote_site)
        )

        if active_site == "local" and active_site != remote_site:
            sync_settings = self.sync_module.get_sync_project_setting(
            sync_settings = sync_module.get_sync_project_setting(
                project_name,
                exclude_locals=False,
                cached=False)

@@ -179,7 +190,7 @@ class HostDirmap(object):
            self.log.debug("remote overrides {}".format(remote_overrides))

            current_platform = platform.system().lower()
            remote_provider = self.sync_module.get_provider_for_site(
            remote_provider = sync_module.get_provider_for_site(
                project_name, remote_site
            )
            # dirmap has sense only with regular disk provider, in the workfile
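The new `sync_module` property pairs the cached value with a `_sync_module_discovered` flag, so an absent optional module is looked up only once and is then treated as `None` instead of being re-discovered on every access. A small standalone sketch of the same pattern (all names hypothetical):

class ServiceRegistry:
    """Hypothetical stand-in for ModulesManager, used only for this sketch."""

    def __init__(self, services):
        self._services = services

    def get(self, name):
        return self._services.get(name)


class Consumer:
    def __init__(self, service=None):
        self._service = service
        # The flag separates "never looked up" from "looked up, not
        # available", so a missing optional module is resolved only once.
        self._service_discovered = service is not None

    @property
    def service(self):
        if not self._service_discovered:
            self._service_discovered = True
            registry = ServiceRegistry({"sync_server": object()})
            self._service = registry.get("sync_server")
        return self._service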
@ -1,11 +1,5 @@
|
|||
import os
|
||||
import sys
|
||||
import six
|
||||
|
||||
from openpype.lib import (
|
||||
get_ffmpeg_tool_path,
|
||||
run_subprocess,
|
||||
)
|
||||
from openpype.pipeline import publish
|
||||
from openpype.hosts.aftereffects.api import get_stub
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
from pathlib import Path
|
||||
|
||||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class AddPythonScriptToLaunchArgs(PreLaunchHook):
|
||||
|
|
@ -8,9 +8,8 @@ class AddPythonScriptToLaunchArgs(PreLaunchHook):
|
|||
|
||||
# Append after file argument
|
||||
order = 15
|
||||
app_groups = [
|
||||
"blender",
|
||||
]
|
||||
app_groups = {"blender"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
if not self.launch_context.data.get("python_scripts"):
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import os
|
|||
import re
|
||||
import subprocess
|
||||
from platform import system
|
||||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class InstallPySideToBlender(PreLaunchHook):
|
||||
|
|
@ -16,7 +16,8 @@ class InstallPySideToBlender(PreLaunchHook):
|
|||
blender's python packages.
|
||||
"""
|
||||
|
||||
app_groups = ["blender"]
|
||||
app_groups = {"blender"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
# Prelaunch hook is not crucial
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import subprocess
|
||||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class BlenderConsoleWindows(PreLaunchHook):
|
||||
|
|
@ -13,8 +13,9 @@ class BlenderConsoleWindows(PreLaunchHook):
|
|||
|
||||
# Should be as last hook because must change launch arguments to string
|
||||
order = 1000
|
||||
app_groups = ["blender"]
|
||||
platforms = ["windows"]
|
||||
app_groups = {"blender"}
|
||||
platforms = {"windows"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
# Change `creationflags` to CREATE_NEW_CONSOLE
|
||||
|
|
|
|||
|
|
@ -2,20 +2,18 @@ import os
|
|||
import shutil
|
||||
import winreg
|
||||
import subprocess
|
||||
from openpype.lib import PreLaunchHook, get_openpype_execute_args
|
||||
from openpype.hosts.celaction import scripts
|
||||
|
||||
CELACTION_SCRIPTS_DIR = os.path.dirname(
|
||||
os.path.abspath(scripts.__file__)
|
||||
)
|
||||
from openpype.lib import get_openpype_execute_args
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from openpype.hosts.celaction import CELACTION_ROOT_DIR
|
||||
|
||||
|
||||
class CelactionPrelaunchHook(PreLaunchHook):
|
||||
"""
|
||||
Bootstrap celacion with pype
|
||||
"""
|
||||
app_groups = ["celaction"]
|
||||
platforms = ["windows"]
|
||||
app_groups = {"celaction"}
|
||||
platforms = {"windows"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
asset_doc = self.data["asset_doc"]
|
||||
|
|
@ -37,7 +35,9 @@ class CelactionPrelaunchHook(PreLaunchHook):
|
|||
winreg.KEY_ALL_ACCESS
|
||||
)
|
||||
|
||||
path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py")
|
||||
path_to_cli = os.path.join(
|
||||
CELACTION_ROOT_DIR, "scripts", "publish_cli.py"
|
||||
)
|
||||
subprocess_args = get_openpype_execute_args("run", path_to_cli)
|
||||
openpype_executable = subprocess_args.pop(0)
|
||||
workfile_settings = self.get_workfile_settings()
|
||||
|
|
@ -122,9 +122,8 @@ class CelactionPrelaunchHook(PreLaunchHook):
|
|||
if not os.path.exists(workfile_path):
|
||||
# TODO add ability to set different template workfile path via
|
||||
# settings
|
||||
openpype_celaction_dir = os.path.dirname(CELACTION_SCRIPTS_DIR)
|
||||
template_path = os.path.join(
|
||||
openpype_celaction_dir,
|
||||
CELACTION_ROOT_DIR,
|
||||
"resources",
|
||||
"celaction_template_scene.scn"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -6,13 +6,10 @@ import socket
|
|||
from pprint import pformat
|
||||
|
||||
from openpype.lib import (
|
||||
PreLaunchHook,
|
||||
get_openpype_username,
|
||||
run_subprocess,
|
||||
)
|
||||
from openpype.lib.applications import (
|
||||
ApplicationLaunchFailed
|
||||
)
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from openpype.hosts import flame as opflame
|
||||
|
||||
|
||||
|
|
@ -22,11 +19,12 @@ class FlamePrelaunch(PreLaunchHook):
|
|||
Will make sure flame_script_dirs are copied to user's folder defined
|
||||
in environment var FLAME_SCRIPT_DIR.
|
||||
"""
|
||||
app_groups = ["flame"]
|
||||
app_groups = {"flame"}
|
||||
permissions = 0o777
|
||||
|
||||
wtc_script_path = os.path.join(
|
||||
opflame.HOST_DIR, "api", "scripts", "wiretap_com.py")
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
|
|
|||
|
|
@ -2,12 +2,16 @@ import os
|
|||
import shutil
|
||||
import platform
|
||||
from pathlib import Path
|
||||
from openpype.lib import PreLaunchHook, ApplicationLaunchFailed
|
||||
from openpype.hosts.fusion import (
|
||||
FUSION_HOST_DIR,
|
||||
FUSION_VERSIONS_DICT,
|
||||
get_fusion_version,
|
||||
)
|
||||
from openpype.lib.applications import (
|
||||
PreLaunchHook,
|
||||
LaunchTypes,
|
||||
ApplicationLaunchFailed,
|
||||
)
|
||||
|
||||
|
||||
class FusionCopyPrefsPrelaunch(PreLaunchHook):
|
||||
|
|
@ -21,8 +25,9 @@ class FusionCopyPrefsPrelaunch(PreLaunchHook):
|
|||
Master.prefs is defined in openpype/hosts/fusion/deploy/fusion_shared.prefs
|
||||
"""
|
||||
|
||||
app_groups = ["fusion"]
|
||||
app_groups = {"fusion"}
|
||||
order = 2
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def get_fusion_profile_name(self, profile_version) -> str:
|
||||
# Returns 'Default', unless FUSION16_PROFILE is set
|
||||
|
|
|
|||
|
|
@ -1,5 +1,9 @@
|
|||
import os
|
||||
from openpype.lib import PreLaunchHook, ApplicationLaunchFailed
|
||||
from openpype.lib.applications import (
|
||||
PreLaunchHook,
|
||||
LaunchTypes,
|
||||
ApplicationLaunchFailed,
|
||||
)
|
||||
from openpype.hosts.fusion import (
|
||||
FUSION_HOST_DIR,
|
||||
FUSION_VERSIONS_DICT,
|
||||
|
|
@ -17,8 +21,9 @@ class FusionPrelaunch(PreLaunchHook):
|
|||
Fusion 18 : Python 3.6 - 3.10
|
||||
"""
|
||||
|
||||
app_groups = ["fusion"]
|
||||
app_groups = {"fusion"}
|
||||
order = 1
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
# making sure python 3 is installed at provided path
|
||||
|
|
|
|||
|
|
@@ -167,9 +167,12 @@ class HoudiniCreatorBase(object):
class HoudiniCreator(NewCreator, HoudiniCreatorBase):
    """Base class for most of the Houdini creator plugins."""
    selected_nodes = []
    settings_name = None

    def create(self, subset_name, instance_data, pre_create_data):
        try:
            self.selected_nodes = []

            if pre_create_data.get("use_selection"):
                self.selected_nodes = hou.selectedNodes()


@@ -293,21 +296,20 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
        """
        return [hou.ropNodeTypeCategory()]

    def get_creator_settings(self, project_settings, settings_key=None):
        if not settings_key:
            settings_key = self.__class__.__name__
        return project_settings["houdini"]["create"][settings_key]

    def apply_settings(
        self,
        project_settings,
        system_settings
    ):
    def apply_settings(self, project_settings, system_settings):
        """Method called on initialization of plugin to apply settings."""

        # plugin settings
        plugin_settings = self.get_creator_settings(project_settings)
        settings_name = self.settings_name
        if settings_name is None:
            settings_name = self.__class__.__name__

        # individual attributes
        self.default_variants = plugin_settings.get(
            "default_variants") or self.default_variants
        settings = project_settings["houdini"]["create"]
        settings = settings.get(settings_name)
        if settings is None:
            self.log.debug(
                "No settings found for {}".format(self.__class__.__name__)
            )
            return

        for key, value in settings.items():
            setattr(self, key, value)
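With the new lookup, a creator's settings block is resolved from `project_settings["houdini"]["create"]` by `settings_name`, falling back to the class name, and every key found there is set as a plugin attribute. A hedged sketch of how a subclass might use that, for example to keep reading an older settings key after a class rename (the class and keys below are made up):

class CreateExampleRop(HoudiniCreator):
    """Hypothetical creator reusing another settings block."""

    identifier = "io.openpype.creators.houdini.example"
    label = "Example ROP"
    family = "example"

    # apply_settings() will read
    # project_settings["houdini"]["create"]["CreateExampleRopLegacy"]
    # and setattr() each key on this plugin, so existing project overrides
    # keep working even though the class name changed.
    settings_name = "CreateExampleRopLegacy"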
@ -1,4 +1,4 @@
|
|||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class SetPath(PreLaunchHook):
|
||||
|
|
@ -6,7 +6,8 @@ class SetPath(PreLaunchHook):
|
|||
|
||||
Hook `GlobalHostDataHook` must be executed before this hook.
|
||||
"""
|
||||
app_groups = ["houdini"]
|
||||
app_groups = {"houdini"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
workdir = self.launch_context.env.get("AVALON_WORKDIR", "")
|
||||
|
|
|
|||
|
|
@ -12,7 +12,10 @@ class CreateArnoldAss(plugin.HoudiniCreator):
|
|||
icon = "magic"
|
||||
default_variants = ["Main"]
|
||||
|
||||
|
||||
# Default extension: `.ass` or `.ass.gz`
|
||||
# however calling HoudiniCreator.create()
|
||||
# will override it by the value in the project settings
|
||||
ext = ".ass"
|
||||
|
||||
def create(self, subset_name, instance_data, pre_create_data):
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ from openpype.lib import EnumDef
|
|||
class CreateBGEO(plugin.HoudiniCreator):
|
||||
"""BGEO pointcache creator."""
|
||||
identifier = "io.openpype.creators.houdini.bgeo"
|
||||
label = "BGEO PointCache"
|
||||
label = "PointCache (Bgeo)"
|
||||
family = "pointcache"
|
||||
icon = "gears"
|
||||
|
||||
|
|
|
|||
|
|
@ -67,6 +67,7 @@ class CreateKarmaROP(plugin.HoudiniCreator):
|
|||
camera = None
|
||||
for node in self.selected_nodes:
|
||||
if node.type().name() == "cam":
|
||||
camera = node.path()
|
||||
has_camera = pre_create_data.get("cam_res")
|
||||
if has_camera:
|
||||
res_x = node.evalParm("resx")
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ import hou
|
|||
class CreatePointCache(plugin.HoudiniCreator):
|
||||
"""Alembic ROP to pointcache"""
|
||||
identifier = "io.openpype.creators.houdini.pointcache"
|
||||
label = "Point Cache"
|
||||
label = "PointCache (Abc)"
|
||||
family = "pointcache"
|
||||
icon = "gears"
|
||||
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
|
|||
family = "redshift_rop"
|
||||
icon = "magic"
|
||||
default_variants = ["master"]
|
||||
|
||||
ext = "exr"
|
||||
|
||||
def create(self, subset_name, instance_data, pre_create_data):
|
||||
|
|
|
|||
|
|
@ -1,26 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Validator plugin for SOP Path in bgeo isntance."""
|
||||
import pyblish.api
|
||||
from openpype.pipeline import PublishValidationError
|
||||
|
||||
|
||||
class ValidateNoSOPPath(pyblish.api.InstancePlugin):
|
||||
"""Validate if SOP Path in BGEO instance exists."""
|
||||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
families = ["bgeo"]
|
||||
label = "Validate BGEO SOP Path"
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
import hou
|
||||
|
||||
node = hou.node(instance.data.get("instance_node"))
|
||||
sop_path = node.evalParm("soppath")
|
||||
if not sop_path:
|
||||
raise PublishValidationError(
|
||||
("Empty SOP Path ('soppath' parameter) found in "
|
||||
f"the BGEO instance Geometry - {node.path()}"))
|
||||
if not isinstance(hou.node(sop_path), hou.SopNode):
|
||||
raise PublishValidationError(
|
||||
"SOP path is not pointing to valid SOP node.")
|
||||
|
|
@ -32,8 +32,9 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
|
|||
def process(self, instance):
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
nodes = [n.path() for n in invalid]
|
||||
raise PublishValidationError(
|
||||
"See log for details. " "Invalid nodes: {0}".format(invalid),
|
||||
"See log for details. " "Invalid nodes: {0}".format(nodes),
|
||||
title=self.label
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -7,8 +7,6 @@ from openpype.pipeline import (
|
|||
)
|
||||
from openpype.pipeline.publish import RepairAction
|
||||
|
||||
from openpype.pipeline.publish import RepairAction
|
||||
|
||||
|
||||
class ValidateWorkfilePaths(
|
||||
pyblish.api.InstancePlugin, OptionalPyblishPluginMixin):
|
||||
|
|
|
|||
|
|
@ -185,7 +185,10 @@ class MaxCreatorBase(object):
|
|||
node = rt.Container(name=node)
|
||||
|
||||
attrs = rt.Execute(MS_CUSTOM_ATTRIB)
|
||||
rt.custAttributes.add(node.baseObject, attrs)
|
||||
modifier = rt.EmptyModifier()
|
||||
rt.addModifier(node, modifier)
|
||||
node.modifiers[0].name = "OP Data"
|
||||
rt.custAttributes.add(node.modifiers[0], attrs)
|
||||
|
||||
return node
|
||||
|
||||
|
|
@ -215,7 +218,8 @@ class MaxCreator(Creator, MaxCreatorBase):
|
|||
|
||||
# Setting the property
|
||||
rt.setProperty(
|
||||
instance_node.openPypeData, "all_handles", node_list)
|
||||
instance_node.modifiers[0].openPypeData,
|
||||
"all_handles", node_list)
|
||||
|
||||
self._add_instance_to_context(instance)
|
||||
imprint(instance_node.name, instance.data_to_store())
|
||||
|
|
@ -254,8 +258,8 @@ class MaxCreator(Creator, MaxCreatorBase):
|
|||
instance_node = rt.GetNodeByName(
|
||||
instance.data.get("instance_node"))
|
||||
if instance_node:
|
||||
count = rt.custAttributes.count(instance_node)
|
||||
rt.custAttributes.delete(instance_node, count)
|
||||
count = rt.custAttributes.count(instance_node.modifiers[0])
|
||||
rt.custAttributes.delete(instance_node.modifiers[0], count)
|
||||
rt.Delete(instance_node)
|
||||
|
||||
self._remove_instance_from_context(instance)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,8 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Pre-launch to force 3ds max startup script."""
|
||||
from openpype.lib import PreLaunchHook
|
||||
import os
|
||||
from openpype.hosts.max import MAX_HOST_DIR
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class ForceStartupScript(PreLaunchHook):
|
||||
|
|
@ -13,12 +14,14 @@ class ForceStartupScript(PreLaunchHook):
|
|||
|
||||
Hook `GlobalHostDataHook` must be executed before this hook.
|
||||
"""
|
||||
app_groups = ["3dsmax"]
|
||||
app_groups = {"3dsmax", "adsk_3dsmax"}
|
||||
order = 11
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
startup_args = [
|
||||
"-U",
|
||||
"MAXScript",
|
||||
f"{os.getenv('OPENPYPE_ROOT')}\\openpype\\hosts\\max\\startup\\startup.ms"] # noqa
|
||||
os.path.join(MAX_HOST_DIR, "startup", "startup.ms"),
|
||||
]
|
||||
self.launch_context.launch_args.append(startup_args)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Pre-launch hook to inject python environment."""
|
||||
from openpype.lib import PreLaunchHook
|
||||
import os
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class InjectPythonPath(PreLaunchHook):
|
||||
|
|
@ -13,7 +13,8 @@ class InjectPythonPath(PreLaunchHook):
|
|||
|
||||
Hook `GlobalHostDataHook` must be executed before this hook.
|
||||
"""
|
||||
app_groups = ["3dsmax"]
|
||||
app_groups = {"3dsmax", "adsk_3dsmax"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
self.launch_context.env["MAX_PYTHONPATH"] = os.environ["PYTHONPATH"]
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class SetPath(PreLaunchHook):
|
||||
|
|
@ -6,7 +6,8 @@ class SetPath(PreLaunchHook):
|
|||
|
||||
Hook `GlobalHostDataHook` must be executed before this hook.
|
||||
"""
|
||||
app_groups = ["max"]
|
||||
app_groups = {"max"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
workdir = self.launch_context.env.get("AVALON_WORKDIR", "")
|
||||
|
|
|
|||
|
|
@ -17,6 +17,6 @@ class CollectMembers(pyblish.api.InstancePlugin):
|
|||
container = rt.GetNodeByName(instance.data["instance_node"])
|
||||
instance.data["members"] = [
|
||||
member.node for member
|
||||
in container.openPypeData.all_handles
|
||||
in container.modifiers[0].openPypeData.all_handles
|
||||
]
|
||||
self.log.debug("{}".format(instance.data["members"]))
|
||||
|
|
|
|||
|
|
@ -439,7 +439,7 @@ class RenderlayerCreator(NewCreator, MayaCreatorBase):
|
|||
|
||||
creator_identifier = cmds.getAttr(node + ".creator_identifier")
|
||||
if creator_identifier == self.identifier:
|
||||
self.log.info(f"Found node: {node}")
|
||||
self.log.info("Found node: {}".format(node))
|
||||
return node
|
||||
|
||||
def _create_layer_instance_node(self, layer):
|
||||
|
|
@ -581,6 +581,9 @@ class ReferenceLoader(Loader):
|
|||
formatting_data = {
|
||||
"asset_name": asset['name'],
|
||||
"asset_type": asset['type'],
|
||||
"folder": {
|
||||
"name": asset["name"],
|
||||
},
|
||||
"subset": subset['name'],
|
||||
"family": (
|
||||
subset['data'].get('family') or
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class MayaPreAutoLoadPlugins(PreLaunchHook):
|
||||
|
|
@ -6,7 +6,8 @@ class MayaPreAutoLoadPlugins(PreLaunchHook):
|
|||
|
||||
# Before AddLastWorkfileToLaunchArgs
|
||||
order = 9
|
||||
app_groups = ["maya"]
|
||||
app_groups = {"maya"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from openpype.hosts.maya.lib import create_workspace_mel
|
||||
|
||||
|
||||
|
|
@ -7,7 +7,8 @@ class PreCopyMel(PreLaunchHook):
|
|||
|
||||
Hook `GlobalHostDataHook` must be executed before this hook.
|
||||
"""
|
||||
app_groups = ["maya"]
|
||||
app_groups = {"maya"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
project_doc = self.data["project_doc"]
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class MayaPreOpenWorkfilePostInitialization(PreLaunchHook):
|
||||
|
|
@ -6,7 +6,8 @@ class MayaPreOpenWorkfilePostInitialization(PreLaunchHook):
|
|||
|
||||
# Before AddLastWorkfileToLaunchArgs.
|
||||
order = 9
|
||||
app_groups = ["maya"]
|
||||
app_groups = {"maya"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ class CreateModel(plugin.MayaCreator):
|
|||
label = "Model"
|
||||
family = "model"
|
||||
icon = "cube"
|
||||
defaults = ["Main", "Proxy", "_MD", "_HD", "_LD"]
|
||||
default_variants = ["Main", "Proxy", "_MD", "_HD", "_LD"]
|
||||
|
||||
write_color_sets = False
|
||||
write_face_sets = False
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ class CreateSetDress(plugin.MayaCreator):
|
|||
label = "Set Dress"
|
||||
family = "setdress"
|
||||
icon = "cubes"
|
||||
defaults = ["Main", "Anim"]
|
||||
default_variants = ["Main", "Anim"]
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
return [
|
||||
|
|
|
|||
|
|
@ -107,6 +107,9 @@ class CollectReview(pyblish.api.InstancePlugin):
|
|||
data["displayLights"] = display_lights
|
||||
data["burninDataMembers"] = burninDataMembers
|
||||
|
||||
for key, value in instance.data["publish_attributes"].items():
|
||||
data["publish_attributes"][key] = value
|
||||
|
||||
# The review instance must be active
|
||||
cmds.setAttr(str(instance) + '.active', 1)
|
||||
|
||||
|
|
|
|||
|
|
@ -63,15 +63,10 @@ class ValidateModelContent(pyblish.api.InstancePlugin):
|
|||
return True
|
||||
|
||||
# Top group
|
||||
assemblies = cmds.ls(content_instance, assemblies=True, long=True)
|
||||
if len(assemblies) != 1 and cls.validate_top_group:
|
||||
top_parents = set([x.split("|")[1] for x in content_instance])
|
||||
if cls.validate_top_group and len(top_parents) != 1:
|
||||
cls.log.error("Must have exactly one top group")
|
||||
return assemblies
|
||||
if len(assemblies) == 0:
|
||||
cls.log.warning("No top group found. "
|
||||
"(Are there objects in the instance?"
|
||||
" Or is it parented in another group?)")
|
||||
return assemblies or True
|
||||
return top_parents
|
||||
|
||||
def _is_visible(node):
|
||||
"""Return whether node is visible"""
|
||||
|
|
@ -82,11 +77,11 @@ class ValidateModelContent(pyblish.api.InstancePlugin):
|
|||
visibility=True)
|
||||
|
||||
# The roots must be visible (the assemblies)
|
||||
for assembly in assemblies:
|
||||
if not _is_visible(assembly):
|
||||
cls.log.error("Invisible assembly (root node) is not "
|
||||
"allowed: {0}".format(assembly))
|
||||
invalid.add(assembly)
|
||||
for parent in top_parents:
|
||||
if not _is_visible(parent):
|
||||
cls.log.error("Invisible parent (root node) is not "
|
||||
"allowed: {0}".format(parent))
|
||||
invalid.add(parent)
|
||||
|
||||
# Ensure at least one shape is visible
|
||||
if not any(_is_visible(shape) for shape in shapes):
|
||||
|
|
|
|||
|
|
@@ -424,10 +424,13 @@ def add_publish_knob(node):
    return node


@deprecated
@deprecated("openpype.hosts.nuke.api.lib.set_node_data")
def set_avalon_knob_data(node, data=None, prefix="avalon:"):
    """[DEPRECATED] Sets data into nodes's avalon knob

    This function is still used but soon will be deprecated.
    Use `set_node_data` instead.

    Arguments:
        node (nuke.Node): Nuke node to imprint with data,
        data (dict, optional): Data to be imprinted into AvalonTab

@@ -487,10 +490,13 @@ def set_avalon_knob_data(node, data=None, prefix="avalon:"):
    return node


@deprecated
@deprecated("openpype.hosts.nuke.api.lib.get_node_data")
def get_avalon_knob_data(node, prefix="avalon:", create=True):
    """[DEPRECATED] Gets a data from nodes's avalon knob

    This function is still used but soon will be deprecated.
    Use `get_node_data` instead.

    Arguments:
        node (obj): Nuke node to search for data,
        prefix (str, optional): filtering prefix
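The `deprecated` decorator itself is not shown in this diff; after the change it is passed the dotted path of the replacement function. A rough, self-contained sketch of how such a decorator is commonly written (note the real helper is also used bare as `@deprecated` elsewhere in this commit, which this sketch does not handle):

import functools
import warnings


def deprecated(new_destination):
    """Sketch: warn on call and point the user at a replacement.

    `new_destination` is the dotted path of the function to use instead,
    as passed in the hunks above.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            warnings.warn(
                "'{}' is deprecated, use '{}' instead.".format(
                    func.__name__, new_destination),
                DeprecationWarning,
                stacklevel=2,
            )
            return func(*args, **kwargs)
        return wrapper
    return decorator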
@ -1699,7 +1705,7 @@ def create_write_node_legacy(
|
|||
knob_value = float(knob_value)
|
||||
if knob_type == "bool":
|
||||
knob_value = bool(knob_value)
|
||||
if knob_type in ["2d_vector", "3d_vector"]:
|
||||
if knob_type in ["2d_vector", "3d_vector", "color", "box"]:
|
||||
knob_value = list(knob_value)
|
||||
|
||||
GN[knob_name].setValue(knob_value)
|
||||
|
|
@ -1715,7 +1721,7 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs):
|
|||
Args:
|
||||
node (nuke.Node): nuke node
|
||||
knob_settings (list): list of dict. Keys are `type`, `name`, `value`
|
||||
kwargs (dict)[optional]: keys for formatable knob settings
|
||||
kwargs (dict)[optional]: keys for formattable knob settings
|
||||
"""
|
||||
for knob in knob_settings:
|
||||
log.debug("__ knob: {}".format(pformat(knob)))
|
||||
|
|
@ -1732,7 +1738,7 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs):
|
|||
)
|
||||
continue
|
||||
|
||||
# first deal with formatable knob settings
|
||||
# first deal with formattable knob settings
|
||||
if knob_type == "formatable":
|
||||
template = knob["template"]
|
||||
to_type = knob["to_type"]
|
||||
|
|
@ -1741,8 +1747,8 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs):
|
|||
**kwargs
|
||||
)
|
||||
except KeyError as msg:
|
||||
log.warning("__ msg: {}".format(msg))
|
||||
raise KeyError(msg)
|
||||
raise KeyError(
|
||||
"Not able to format expression: {}".format(msg))
|
||||
|
||||
# convert value to correct type
|
||||
if to_type == "2d_vector":
|
||||
|
|
@ -1781,8 +1787,8 @@ def convert_knob_value_to_correct_type(knob_type, knob_value):
|
|||
knob_value = knob_value
|
||||
elif knob_type == "color_gui":
|
||||
knob_value = color_gui_to_int(knob_value)
|
||||
elif knob_type in ["2d_vector", "3d_vector", "color"]:
|
||||
knob_value = [float(v) for v in knob_value]
|
||||
elif knob_type in ["2d_vector", "3d_vector", "color", "box"]:
|
||||
knob_value = [float(val_) for val_ in knob_value]
|
||||
|
||||
return knob_value
|
||||
|
||||
|
|
@ -2204,7 +2210,6 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
|
|||
continue
|
||||
preset_clrsp = input["colorspace"]
|
||||
|
||||
log.debug(preset_clrsp)
|
||||
if preset_clrsp is not None:
|
||||
current = n["colorspace"].value()
|
||||
future = str(preset_clrsp)
|
||||
|
|
@ -2686,7 +2691,15 @@ def _launch_workfile_app():
|
|||
host_tools.show_workfiles(parent=None, on_top=True)
|
||||
|
||||
|
||||
@deprecated("openpype.hosts.nuke.api.lib.start_workfile_template_builder")
|
||||
def process_workfile_builder():
|
||||
""" [DEPRECATED] Process workfile builder on nuke start
|
||||
|
||||
This function is deprecated and will be removed in future versions.
|
||||
Use settings for `project_settings/nuke/templated_workfile_build` which are
|
||||
supported by api `start_workfile_template_builder()`.
|
||||
"""
|
||||
|
||||
# to avoid looping of the callback, remove it!
|
||||
nuke.removeOnCreate(process_workfile_builder, nodeClass="Root")
|
||||
|
||||
|
|
@ -2695,11 +2708,6 @@ def process_workfile_builder():
|
|||
workfile_builder = project_settings["nuke"].get(
|
||||
"workfile_builder", {})
|
||||
|
||||
# get all imortant settings
|
||||
openlv_on = env_value_to_bool(
|
||||
env_key="AVALON_OPEN_LAST_WORKFILE",
|
||||
default=None)
|
||||
|
||||
# get settings
|
||||
createfv_on = workfile_builder.get("create_first_version") or None
|
||||
builder_on = workfile_builder.get("builder_on_start") or None
|
||||
|
|
@ -2740,20 +2748,15 @@ def process_workfile_builder():
|
|||
save_file(last_workfile_path)
|
||||
return
|
||||
|
||||
# skip opening of last version if it is not enabled
|
||||
if not openlv_on or not os.path.exists(last_workfile_path):
|
||||
return
|
||||
|
||||
log.info("Opening last workfile...")
|
||||
# open workfile
|
||||
open_file(last_workfile_path)
|
||||
|
||||
|
||||
def start_workfile_template_builder():
|
||||
from .workfile_template_builder import (
|
||||
build_workfile_template
|
||||
)
|
||||
|
||||
# remove callback since it would be duplicating the workfile
|
||||
nuke.removeOnCreate(start_workfile_template_builder, nodeClass="Root")
|
||||
|
||||
# to avoid looping of the callback, remove it!
|
||||
log.info("Starting workfile template builder...")
|
||||
try:
|
||||
|
|
@ -2761,8 +2764,6 @@ def start_workfile_template_builder():
|
|||
except TemplateProfileNotFound:
|
||||
log.warning("Template profile not found. Skipping...")
|
||||
|
||||
# remove callback since it would be duplicating the workfile
|
||||
nuke.removeOnCreate(start_workfile_template_builder, nodeClass="Root")
|
||||
|
||||
@deprecated
|
||||
def recreate_instance(origin_node, avalon_data=None):
|
||||
|
|
@ -2954,6 +2955,7 @@ class DirmapCache:
|
|||
"""Caching class to get settings and sync_module easily and only once."""
|
||||
_project_name = None
|
||||
_project_settings = None
|
||||
_sync_module_discovered = False
|
||||
_sync_module = None
|
||||
_mapping = None
|
||||
|
||||
|
|
@ -2971,8 +2973,10 @@ class DirmapCache:
|
|||
|
||||
@classmethod
|
||||
def sync_module(cls):
|
||||
if cls._sync_module is None:
|
||||
cls._sync_module = ModulesManager().modules_by_name["sync_server"]
|
||||
if not cls._sync_module_discovered:
|
||||
cls._sync_module_discovered = True
|
||||
cls._sync_module = ModulesManager().modules_by_name.get(
|
||||
"sync_server")
|
||||
return cls._sync_module
|
||||
|
||||
@classmethod
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import nuke
|
|||
|
||||
import os
|
||||
import importlib
|
||||
from collections import OrderedDict
|
||||
from collections import OrderedDict, defaultdict
|
||||
|
||||
import pyblish.api
|
||||
|
||||
|
|
@ -34,6 +34,7 @@ from .lib import (
|
|||
get_main_window,
|
||||
add_publish_knob,
|
||||
WorkfileSettings,
|
||||
# TODO: remove this once workfile builder will be removed
|
||||
process_workfile_builder,
|
||||
start_workfile_template_builder,
|
||||
launch_workfiles_app,
|
||||
|
|
@ -155,11 +156,18 @@ def add_nuke_callbacks():
|
|||
"""
|
||||
nuke_settings = get_current_project_settings()["nuke"]
|
||||
workfile_settings = WorkfileSettings()
|
||||
|
||||
# Set context settings.
|
||||
nuke.addOnCreate(
|
||||
workfile_settings.set_context_settings, nodeClass="Root")
|
||||
|
||||
# adding favorites to file browser
|
||||
nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root")
|
||||
|
||||
# template builder callbacks
|
||||
nuke.addOnCreate(start_workfile_template_builder, nodeClass="Root")
|
||||
|
||||
# TODO: remove this callback once workfile builder will be removed
|
||||
nuke.addOnCreate(process_workfile_builder, nodeClass="Root")
|
||||
|
||||
# fix ffmpeg settings on script
|
||||
|
|
@ -169,11 +177,12 @@ def add_nuke_callbacks():
|
|||
nuke.addOnScriptLoad(check_inventory_versions)
|
||||
nuke.addOnScriptSave(check_inventory_versions)
|
||||
|
||||
# # set apply all workfile settings on script load and save
|
||||
# set apply all workfile settings on script load and save
|
||||
nuke.addOnScriptLoad(WorkfileSettings().set_context_settings)
|
||||
|
||||
|
||||
if nuke_settings["nuke-dirmap"]["enabled"]:
|
||||
log.info("Added Nuke's dirmaping callback ...")
|
||||
log.info("Added Nuke's dir-mapping callback ...")
|
||||
# Add dirmap for file paths.
|
||||
nuke.addFilenameFilter(dirmap_file_name_filter)
|
||||
|
||||
|
|
@@ -537,7 +546,10 @@ def list_instances(creator_id=None):
    Returns:
        (list) of dictionaries matching instances format
    """
    listed_instances = []
    instances_by_order = defaultdict(list)
    subset_instances = []
    instance_ids = set()

    for node in nuke.allNodes(recurseGroups=True):

        if node.Class() in ["Viewer", "Dot"]:

@@ -563,9 +575,57 @@
        if creator_id and instance_data["creator_identifier"] != creator_id:
            continue

        listed_instances.append((node, instance_data))
        if instance_data["instance_id"] in instance_ids:
            instance_data.pop("instance_id")
        else:
            instance_ids.add(instance_data["instance_id"])

    return listed_instances
        # node name could change, so update subset name data
        _update_subset_name_data(instance_data, node)

        if "render_order" not in node.knobs():
            subset_instances.append((node, instance_data))
            continue

        order = int(node["render_order"].value())
        instances_by_order[order].append((node, instance_data))

    # Sort instances based on order attribute or subset name.
    # TODO: remove in future Publisher enhanced with sorting
    ordered_instances = []
    for key in sorted(instances_by_order.keys()):
        instances_by_subset = defaultdict(list)
        for node, data_ in instances_by_order[key]:
            instances_by_subset[data_["subset"]].append((node, data_))
        for subkey in sorted(instances_by_subset.keys()):
            ordered_instances.extend(instances_by_subset[subkey])

    instances_by_subset = defaultdict(list)
    for node, data_ in subset_instances:
        instances_by_subset[data_["subset"]].append((node, data_))
    for key in sorted(instances_by_subset.keys()):
        ordered_instances.extend(instances_by_subset[key])

    return ordered_instances


def _update_subset_name_data(instance_data, node):
    """Update subset name data in instance data.

    Args:
        instance_data (dict): instance creator data
        node (nuke.Node): nuke node
    """
    # make sure node name is subset name
    old_subset_name = instance_data["subset"]
    old_variant = instance_data["variant"]
    subset_name_root = old_subset_name.replace(old_variant, "")

    new_subset_name = node.name()
    new_variant = new_subset_name.replace(subset_name_root, "")

    instance_data["subset"] = new_subset_name
    instance_data["variant"] = new_variant


def remove_instance(instance):
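The listing now orders instances by the write node's `render_order` knob first, then alphabetically by subset inside each order, and finally appends instances that have no `render_order` knob, again sorted by subset. A standalone sketch of that grouping on toy data (standard library only):

from collections import defaultdict

# Toy stand-ins for (node, instance_data) pairs; only the fields used for
# ordering are kept here.
instances = [
    ("Write3", {"subset": "renderB", "render_order": 2}),
    ("Write1", {"subset": "renderA", "render_order": 1}),
    ("Write2", {"subset": "renderC", "render_order": 1}),
    ("WriteX", {"subset": "prerenderZ"}),  # no render_order knob
]

by_order = defaultdict(list)
without_order = []
for node, data in instances:
    if "render_order" not in data:
        without_order.append((node, data))
    else:
        by_order[data["render_order"]].append((node, data))

ordered = []
for order in sorted(by_order):
    ordered.extend(sorted(by_order[order], key=lambda i: i[1]["subset"]))
ordered.extend(sorted(without_order, key=lambda i: i[1]["subset"]))

print([name for name, _ in ordered])
# ['Write1', 'Write2', 'Write3', 'WriteX']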
@ -212,9 +212,15 @@ class NukeCreator(NewCreator):
|
|||
created_instance["creator_attributes"].pop(key)
|
||||
|
||||
def update_instances(self, update_list):
|
||||
for created_inst, _changes in update_list:
|
||||
for created_inst, changes in update_list:
|
||||
instance_node = created_inst.transient_data["node"]
|
||||
|
||||
# update instance node name if subset name changed
|
||||
if "subset" in changes.changed_keys:
|
||||
instance_node["name"].setValue(
|
||||
changes["subset"].new_value
|
||||
)
|
||||
|
||||
# in case node is not existing anymore (user erased it manually)
|
||||
try:
|
||||
instance_node.fullName()
|
||||
|
|
@ -256,6 +262,17 @@ class NukeWriteCreator(NukeCreator):
|
|||
family = "write"
|
||||
icon = "sign-out"
|
||||
|
||||
def get_linked_knobs(self):
|
||||
linked_knobs = []
|
||||
if "channels" in self.instance_attributes:
|
||||
linked_knobs.append("channels")
|
||||
if "ordered" in self.instance_attributes:
|
||||
linked_knobs.append("render_order")
|
||||
if "use_range_limit" in self.instance_attributes:
|
||||
linked_knobs.extend(["___", "first", "last", "use_limit"])
|
||||
|
||||
return linked_knobs
|
||||
|
||||
def integrate_links(self, node, outputs=True):
|
||||
# skip if no selection
|
||||
if not self.selected_node:
|
||||
|
|
@ -921,7 +938,11 @@ class ExporterReviewMov(ExporterReview):
|
|||
except Exception:
|
||||
self.log.info("`mov64_codec` knob was not found")
|
||||
|
||||
write_node["mov64_write_timecode"].setValue(1)
|
||||
try:
|
||||
write_node["mov64_write_timecode"].setValue(1)
|
||||
except Exception:
|
||||
self.log.info("`mov64_write_timecode` knob was not found")
|
||||
|
||||
write_node["raw"].setValue(1)
|
||||
# connect
|
||||
write_node.setInput(0, self.previous_node)
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
"""Host API required Work Files tool"""
|
||||
import os
|
||||
import nuke
|
||||
import shutil
|
||||
from .utils import is_headless
|
||||
|
||||
|
||||
|
|
@ -21,21 +22,37 @@ def save_file(filepath):
|
|||
|
||||
|
||||
def open_file(filepath):
|
||||
|
||||
def read_script(nuke_script):
|
||||
nuke.scriptClear()
|
||||
nuke.scriptReadFile(nuke_script)
|
||||
nuke.Root()["name"].setValue(nuke_script)
|
||||
nuke.Root()["project_directory"].setValue(os.path.dirname(nuke_script))
|
||||
nuke.Root().setModified(False)
|
||||
|
||||
filepath = filepath.replace("\\", "/")
|
||||
|
||||
# To remain in the same window, we have to clear the script and read
|
||||
# in the contents of the workfile.
|
||||
nuke.scriptClear()
|
||||
# Nuke Preferences can be read after the script is read.
|
||||
read_script(filepath)
|
||||
|
||||
if not is_headless():
|
||||
autosave = nuke.toNode("preferences")["AutoSaveName"].evaluate()
|
||||
autosave_prmpt = "Autosave detected.\nWould you like to load the autosave file?" # noqa
|
||||
autosave_prmpt = "Autosave detected.\n" \
|
||||
"Would you like to load the autosave file?" # noqa
|
||||
if os.path.isfile(autosave) and nuke.ask(autosave_prmpt):
|
||||
filepath = autosave
|
||||
try:
|
||||
# Overwrite the filepath with autosave
|
||||
shutil.copy(autosave, filepath)
|
||||
# Now read the (auto-saved) script again
|
||||
read_script(filepath)
|
||||
except shutil.Error as err:
|
||||
nuke.message(
|
||||
"Detected autosave file could not be used.\n{}"
|
||||
|
||||
.format(err))
|
||||
|
||||
nuke.scriptReadFile(filepath)
|
||||
nuke.Root()["name"].setValue(filepath)
|
||||
nuke.Root()["project_directory"].setValue(os.path.dirname(filepath))
|
||||
nuke.Root().setModified(False)
|
||||
return True
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,11 +1,12 @@
|
|||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook
|
||||
|
||||
|
||||
class PrelaunchNukeAssistHook(PreLaunchHook):
|
||||
"""
|
||||
Adding flag when nukeassist
|
||||
"""
|
||||
app_groups = ["nukeassist"]
|
||||
app_groups = {"nukeassist"}
|
||||
launch_types = set()
|
||||
|
||||
def execute(self):
|
||||
self.launch_context.env["NUKEASSIST"] = "1"
|
||||
|
|
|
|||
|
|
@ -64,9 +64,6 @@ class CreateWriteImage(napi.NukeWriteCreator):
|
|||
)
|
||||
|
||||
def create_instance_node(self, subset_name, instance_data):
|
||||
linked_knobs_ = []
|
||||
if "use_range_limit" in self.instance_attributes:
|
||||
linked_knobs_ = ["channels", "___", "first", "last", "use_limit"]
|
||||
|
||||
# add fpath_template
|
||||
write_data = {
|
||||
|
|
@ -81,7 +78,7 @@ class CreateWriteImage(napi.NukeWriteCreator):
|
|||
write_data,
|
||||
input=self.selected_node,
|
||||
prenodes=self.prenodes,
|
||||
linked_knobs=linked_knobs_,
|
||||
linked_knobs=self.get_linked_knobs(),
|
||||
**{
|
||||
"frame": nuke.frame()
|
||||
}
|
||||
|
|
|
|||
|
|
@ -30,6 +30,9 @@ class CreateWritePrerender(napi.NukeWriteCreator):
|
|||
temp_rendering_path_template = (
|
||||
"{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}")
|
||||
|
||||
# Before write node render.
|
||||
order = 90
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
attr_defs = [
|
||||
BoolDef(
|
||||
|
|
@ -42,10 +45,6 @@ class CreateWritePrerender(napi.NukeWriteCreator):
|
|||
return attr_defs
|
||||
|
||||
def create_instance_node(self, subset_name, instance_data):
|
||||
linked_knobs_ = []
|
||||
if "use_range_limit" in self.instance_attributes:
|
||||
linked_knobs_ = ["channels", "___", "first", "last", "use_limit"]
|
||||
|
||||
# add fpath_template
|
||||
write_data = {
|
||||
"creator": self.__class__.__name__,
|
||||
|
|
@ -68,7 +67,7 @@ class CreateWritePrerender(napi.NukeWriteCreator):
|
|||
write_data,
|
||||
input=self.selected_node,
|
||||
prenodes=self.prenodes,
|
||||
linked_knobs=linked_knobs_,
|
||||
linked_knobs=self.get_linked_knobs(),
|
||||
**{
|
||||
"width": width,
|
||||
"height": height
|
||||
|
|
|
|||
|
|
@ -56,11 +56,15 @@ class CreateWriteRender(napi.NukeWriteCreator):
|
|||
actual_format = nuke.root().knob('format').value()
|
||||
width, height = (actual_format.width(), actual_format.height())
|
||||
|
||||
self.log.debug(">>>>>>> : {}".format(self.instance_attributes))
|
||||
self.log.debug(">>>>>>> : {}".format(self.get_linked_knobs()))
|
||||
|
||||
created_node = napi.create_write_node(
|
||||
subset_name,
|
||||
write_data,
|
||||
input=self.selected_node,
|
||||
prenodes=self.prenodes,
|
||||
linked_knobs=self.get_linked_knobs(),
|
||||
**{
|
||||
"width": width,
|
||||
"height": height
|
||||
|
|
|
|||
|
|
@ -91,14 +91,14 @@ class LoadClip(plugin.NukeLoader):
|
|||
# reset container id so it is always unique for each instance
|
||||
self.reset_container_id()
|
||||
|
||||
self.log.warning(self.extensions)
|
||||
|
||||
is_sequence = len(representation["files"]) > 1
|
||||
|
||||
if is_sequence:
|
||||
representation = self._representation_with_hash_in_frame(
|
||||
representation
|
||||
context["representation"] = \
|
||||
self._representation_with_hash_in_frame(
|
||||
representation
|
||||
)
|
||||
|
||||
filepath = self.filepath_from_context(context)
|
||||
filepath = filepath.replace("\\", "/")
|
||||
self.log.debug("_ filepath: {}".format(filepath))
|
||||
|
|
@ -260,6 +260,7 @@ class LoadClip(plugin.NukeLoader):
|
|||
representation = self._representation_with_hash_in_frame(
|
||||
representation
|
||||
)
|
||||
|
||||
filepath = get_representation_path(representation).replace("\\", "/")
|
||||
self.log.debug("_ filepath: {}".format(filepath))
|
||||
|
||||
|
|
|
|||
|
|
@ -193,4 +193,10 @@ class CollectNukeWrites(pyblish.api.InstancePlugin,
|
|||
if not instance.data.get("review"):
|
||||
instance.data["useSequenceForReview"] = False
|
||||
|
||||
# TODO temporarily set stagingDir as persistent for backward
|
||||
# compatibility. This is mainly focused on `renders`folders which
|
||||
# were previously not cleaned up (and could be used in read notes)
|
||||
# this logic should be removed and replaced with custom staging dir
|
||||
instance.data["stagingDir_persistent"] = True
|
||||
|
||||
self.log.debug("instance.data: {}".format(pformat(instance.data)))
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import os
|
||||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class PreLaunchResolveLastWorkfile(PreLaunchHook):
|
||||
|
|
@ -9,7 +9,8 @@ class PreLaunchResolveLastWorkfile(PreLaunchHook):
|
|||
workfile. This property is set explicitly in Launcher.
|
||||
"""
|
||||
order = 10
|
||||
app_groups = ["resolve"]
|
||||
app_groups = {"resolve"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
if not self.data.get("start_last_workfile"):
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import os
|
||||
from pathlib import Path
|
||||
import platform
|
||||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from openpype.hosts.resolve.utils import setup
|
||||
|
||||
|
||||
|
|
@ -30,7 +30,8 @@ class PreLaunchResolveSetup(PreLaunchHook):
|
|||
|
||||
"""
|
||||
|
||||
app_groups = ["resolve"]
|
||||
app_groups = {"resolve"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
current_platform = platform.system().lower()
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import os
|
||||
|
||||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
import openpype.hosts.resolve
|
||||
|
||||
|
||||
|
|
@ -9,7 +9,8 @@ class PreLaunchResolveStartup(PreLaunchHook):
|
|||
|
||||
"""
|
||||
order = 11
|
||||
app_groups = ["resolve"]
|
||||
app_groups = {"resolve"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
# Set the openpype prelaunch startup script path for easy access
|
||||
|
|
|
|||
|
|
@ -1,7 +1,5 @@
|
|||
from openpype.lib import (
|
||||
PreLaunchHook,
|
||||
get_openpype_execute_args
|
||||
)
|
||||
from openpype.lib import get_openpype_execute_args
|
||||
from openpype.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class TvpaintPrelaunchHook(PreLaunchHook):
|
||||
|
|
@ -13,7 +11,8 @@ class TvpaintPrelaunchHook(PreLaunchHook):
|
|||
Existence of last workfile is checked. If workfile does not exists tries
|
||||
to copy templated workfile from predefined path.
|
||||
"""
|
||||
app_groups = ["tvpaint"]
|
||||
app_groups = {"tvpaint"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
# Pop tvpaint executable
|
||||
|
|
|
|||
|
|
@ -12,6 +12,11 @@ class UnrealAddon(OpenPypeModule, IHostAddon):
|
|||
def initialize(self, module_settings):
|
||||
self.enabled = True
|
||||
|
||||
def get_global_environments(self):
|
||||
return {
|
||||
"AYON_UNREAL_ROOT": UNREAL_ROOT_DIR,
|
||||
}
|
||||
|
||||
def add_implementation_envs(self, env, app):
|
||||
"""Modify environments to contain all required for implementation."""
|
||||
# Set AYON_UNREAL_PLUGIN required for Unreal implementation
|
||||
|
|
@ -54,7 +59,8 @@ class UnrealAddon(OpenPypeModule, IHostAddon):
|
|||
|
||||
# Set default environments if are not set via settings
|
||||
defaults = {
|
||||
"OPENPYPE_LOG_NO_COLORS": "True"
|
||||
"OPENPYPE_LOG_NO_COLORS": "True",
|
||||
"UE_PYTHONPATH": os.environ.get("PYTHONPATH", ""),
|
||||
}
|
||||
for key, value in defaults.items():
|
||||
if not env.get(key):
|
||||
|
|
|
|||
|
|
@ -3,21 +3,22 @@
|
|||
import os
|
||||
import copy
|
||||
from pathlib import Path
|
||||
from openpype.widgets.splash_screen import SplashScreen
|
||||
|
||||
from qtpy import QtCore
|
||||
|
||||
from openpype import resources
|
||||
from openpype.lib.applications import (
|
||||
PreLaunchHook,
|
||||
ApplicationLaunchFailed,
|
||||
LaunchTypes,
|
||||
)
|
||||
from openpype.pipeline.workfile import get_workfile_template_key
|
||||
import openpype.hosts.unreal.lib as unreal_lib
|
||||
from openpype.hosts.unreal.ue_workers import (
|
||||
UEProjectGenerationWorker,
|
||||
UEPluginInstallWorker
|
||||
)
|
||||
|
||||
from openpype import resources
|
||||
from openpype.lib import (
|
||||
PreLaunchHook,
|
||||
ApplicationLaunchFailed,
|
||||
ApplicationNotFound,
|
||||
)
|
||||
from openpype.pipeline.workfile import get_workfile_template_key
|
||||
import openpype.hosts.unreal.lib as unreal_lib
|
||||
from openpype.hosts.unreal.ui import SplashScreen
|
||||
|
||||
|
||||
class UnrealPrelaunchHook(PreLaunchHook):
|
||||
|
|
@ -29,6 +30,8 @@ class UnrealPrelaunchHook(PreLaunchHook):
|
|||
shell script.
|
||||
|
||||
"""
|
||||
app_groups = {"unreal"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
|
@ -187,24 +190,36 @@ class UnrealPrelaunchHook(PreLaunchHook):
|
|||
|
||||
project_path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Set "AYON_UNREAL_PLUGIN" to current process environment for
|
||||
# execution of `create_unreal_project`
|
||||
|
||||
if self.launch_context.env.get("AYON_UNREAL_PLUGIN"):
|
||||
self.log.info((
|
||||
f"{self.signature} using Ayon plugin from "
|
||||
f"{self.launch_context.env.get('AYON_UNREAL_PLUGIN')}"
|
||||
))
|
||||
env_key = "AYON_UNREAL_PLUGIN"
|
||||
if self.launch_context.env.get(env_key):
|
||||
os.environ[env_key] = self.launch_context.env[env_key]
|
||||
|
||||
# engine_path points to the specific Unreal Engine root
|
||||
# so, we are going up from the executable itself 3 levels.
|
||||
engine_path: Path = Path(executable).parents[3]
|
||||
|
||||
if not unreal_lib.check_plugin_existence(engine_path):
|
||||
self.exec_plugin_install(engine_path)
|
||||
# Check if new env variable exists, and if it does, if the path
|
||||
# actually contains the plugin. If not, install it.
|
||||
|
||||
built_plugin_path = self.launch_context.env.get(
|
||||
"AYON_BUILT_UNREAL_PLUGIN", None)
|
||||
|
||||
if unreal_lib.check_built_plugin_existance(built_plugin_path):
|
||||
self.log.info((
|
||||
f"{self.signature} using existing built Ayon plugin from "
|
||||
f"{built_plugin_path}"
|
||||
))
|
||||
unreal_lib.copy_built_plugin(engine_path, Path(built_plugin_path))
|
||||
else:
|
||||
# Set "AYON_UNREAL_PLUGIN" to current process environment for
|
||||
# execution of `create_unreal_project`
|
||||
env_key = "AYON_UNREAL_PLUGIN"
|
||||
if self.launch_context.env.get(env_key):
|
||||
self.log.info((
|
||||
f"{self.signature} using Ayon plugin from "
|
||||
f"{self.launch_context.env.get(env_key)}"
|
||||
))
|
||||
if self.launch_context.env.get(env_key):
|
||||
os.environ[env_key] = self.launch_context.env[env_key]
|
||||
|
||||
if not unreal_lib.check_plugin_existence(engine_path):
|
||||
self.exec_plugin_install(engine_path)
|
||||
|
||||
project_file = project_path / unreal_project_filename
|
||||
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
Subproject commit ff15c700771e719cc5f3d561ac5d6f7590623986
|
||||
Subproject commit 63266607ceb972a61484f046634ddfc9eb0b5757
|
||||
|
|
@ -369,11 +369,11 @@ def get_compatible_integration(
|
|||
|
||||
def get_path_to_cmdlet_project(ue_version: str) -> Path:
|
||||
cmd_project = Path(
|
||||
os.path.abspath(os.getenv("OPENPYPE_ROOT")))
|
||||
os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
# For now, only tested on Windows (For Linux and Mac
|
||||
# it has to be implemented)
|
||||
cmd_project /= f"openpype/hosts/unreal/integration/UE_{ue_version}"
|
||||
cmd_project /= f"integration/UE_{ue_version}"
|
||||
|
||||
# if the integration doesn't exist for current engine version
|
||||
# try to find the closest to it.
|
||||
|
|
@ -429,6 +429,36 @@ def get_build_id(engine_path: Path, ue_version: str) -> str:
|
|||
return "{" + loaded_modules.get("BuildId") + "}"
|
||||
|
||||
|
||||
def check_built_plugin_existance(plugin_path) -> bool:
|
||||
if not plugin_path:
|
||||
return False
|
||||
|
||||
integration_plugin_path = Path(plugin_path)
|
||||
|
||||
if not integration_plugin_path.is_dir():
|
||||
raise RuntimeError("Path to the integration plugin is null!")
|
||||
|
||||
if not (integration_plugin_path / "Binaries").is_dir() \
|
||||
or not (integration_plugin_path / "Intermediate").is_dir():
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def copy_built_plugin(engine_path: Path, plugin_path: Path) -> None:
|
||||
ayon_plugin_path: Path = engine_path / "Engine/Plugins/Marketplace/Ayon"
|
||||
|
||||
if not ayon_plugin_path.is_dir():
|
||||
ayon_plugin_path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
engine_plugin_config_path: Path = ayon_plugin_path / "Config"
|
||||
engine_plugin_config_path.mkdir(exist_ok=True)
|
||||
|
||||
dir_util._path_created = {}
|
||||
|
||||
dir_util.copy_tree(plugin_path.as_posix(), ayon_plugin_path.as_posix())
|
||||
|
||||
|
||||
def check_plugin_existence(engine_path: Path, env: dict = None) -> bool:
|
||||
env = env or os.environ
|
||||
integration_plugin_path: Path = Path(env.get("AYON_UNREAL_PLUGIN", ""))
|
||||
|
|
|
|||
|
|
@@ -40,17 +40,34 @@ def retrieve_exit_code(line: str):
    return None


class UEProjectGenerationWorker(QtCore.QObject):
class UEWorker(QtCore.QObject):
    finished = QtCore.Signal(str)
    failed = QtCore.Signal(str)
    failed = QtCore.Signal(str, int)
    progress = QtCore.Signal(int)
    log = QtCore.Signal(str)

    engine_path: Path = None
    env = None

    def execute(self):
        raise NotImplementedError("Please implement this method!")

    def run(self):
        try:
            self.execute()
        except Exception as e:
            import traceback
            self.log.emit(str(e))
            self.log.emit(traceback.format_exc())
            self.failed.emit(str(e), 1)
            raise e


class UEProjectGenerationWorker(UEWorker):
    stage_begin = QtCore.Signal(str)

    ue_version: str = None
    project_name: str = None
    env = None
    engine_path: Path = None
    project_dir: Path = None
    dev_mode = False
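The refactor pulls the shared Qt signals and the `run()`/`execute()` error handling into a common `UEWorker` base, a template-method split where the base owns the try/except and failure signalling and subclasses only implement `execute()`. A Qt-free sketch of the same shape, using plain callables instead of signals so it runs without a Qt binding (all names hypothetical):

import traceback


class Worker:
    """Minimal, Qt-free sketch of the run()/execute() split used above."""

    def __init__(self, on_failed=print, on_log=print):
        self._on_failed = on_failed
        self._on_log = on_log

    def execute(self):
        raise NotImplementedError("Please implement this method!")

    def run(self):
        # Shared error handling lives in the base class; subclasses only
        # provide the actual work in execute().
        try:
            self.execute()
        except Exception as exc:
            self._on_log(str(exc))
            self._on_log(traceback.format_exc())
            self._on_failed(str(exc), 1)
            raise


class FailingWorker(Worker):
    def execute(self):
        raise RuntimeError("plugin sources not found")

# FailingWorker().run() would log the message and traceback, report the
# failure through the callback, and re-raise the exception.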
@ -87,7 +104,7 @@ class UEProjectGenerationWorker(QtCore.QObject):
|
|||
self.project_name = unreal_project_name
|
||||
self.engine_path = engine_path
|
||||
|
||||
def run(self):
|
||||
def execute(self):
|
||||
# engine_path should be the location of UE_X.X folder
|
||||
|
||||
ue_editor_exe = ue_lib.get_editor_exe_path(self.engine_path,
|
||||
|
|
@ -298,15 +315,8 @@ class UEProjectGenerationWorker(QtCore.QObject):
|
|||
self.finished.emit("Project successfully built!")
|
||||
|
||||
|
||||
class UEPluginInstallWorker(QtCore.QObject):
|
||||
finished = QtCore.Signal(str)
|
||||
class UEPluginInstallWorker(UEWorker):
|
||||
installing = QtCore.Signal(str)
|
||||
failed = QtCore.Signal(str, int)
|
||||
progress = QtCore.Signal(int)
|
||||
log = QtCore.Signal(str)
|
||||
|
||||
engine_path: Path = None
|
||||
env = None
|
||||
|
||||
def setup(self, engine_path: Path, env: dict = None, ):
|
||||
self.engine_path = engine_path
|
||||
|
|
@ -374,7 +384,7 @@ class UEPluginInstallWorker(QtCore.QObject):
|
|||
|
||||
dir_util.remove_tree(temp_dir.as_posix())
|
||||
|
||||
def run(self):
|
||||
def execute(self):
|
||||
src_plugin_dir = Path(self.env.get("AYON_UNREAL_PLUGIN", ""))
|
||||
|
||||
if not os.path.isdir(src_plugin_dir):
|
||||
|
|
|
|||
5
openpype/hosts/unreal/ui/__init__.py
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
from .splash_screen import SplashScreen
|
||||
|
||||
__all__ = (
|
||||
"SplashScreen",
|
||||
)
|
||||
|
|
@ -1,6 +1,5 @@
|
|||
from qtpy import QtWidgets, QtCore, QtGui
|
||||
from openpype import style, resources
|
||||
from igniter.nice_progress_bar import NiceProgressBar
|
||||
|
||||
|
||||
class SplashScreen(QtWidgets.QDialog):
|
||||
|
|
@ -143,7 +142,7 @@ class SplashScreen(QtWidgets.QDialog):
|
|||
button_layout.addWidget(self.close_btn)
|
||||
|
||||
# Progress Bar
|
||||
self.progress_bar = NiceProgressBar()
|
||||
self.progress_bar = QtWidgets.QProgressBar()
|
||||
self.progress_bar.setValue(0)
|
||||
self.progress_bar.setAlignment(QtCore.Qt.AlignTop)
|
||||
|
||||
|
|
@ -20,11 +20,10 @@ class WebpublisherAddon(OpenPypeModule, IHostAddon):
|
|||
Close Python process at the end.
|
||||
"""
|
||||
|
||||
from openpype.pipeline.publish.lib import remote_publish
|
||||
from .lib import get_webpublish_conn, publish_and_log
|
||||
from .lib import get_webpublish_conn, publish_and_log, publish_in_test
|
||||
|
||||
if is_test:
|
||||
remote_publish(log, close_plugin_name)
|
||||
publish_in_test(log, close_plugin_name)
|
||||
return
|
||||
|
||||
dbcon = get_webpublish_conn()
|
||||
|
|
|
|||
|
|
@ -12,7 +12,6 @@ from openpype.client.mongo import OpenPypeMongoConnection
|
|||
from openpype.settings import get_project_settings
|
||||
from openpype.lib import Logger
|
||||
from openpype.lib.profiles_filtering import filter_profiles
|
||||
from openpype.pipeline.publish.lib import find_close_plugin
|
||||
|
||||
ERROR_STATUS = "error"
|
||||
IN_PROGRESS_STATUS = "in_progress"
|
||||
|
|
@@ -68,6 +67,46 @@ def get_batch_asset_task_info(ctx):
    return asset, task_name, task_type


def find_close_plugin(close_plugin_name, log):
    if close_plugin_name:
        plugins = pyblish.api.discover()
        for plugin in plugins:
            if plugin.__name__ == close_plugin_name:
                return plugin

    log.debug("Close plugin not found, app might not close.")


def publish_in_test(log, close_plugin_name=None):
    """Loops through all plugins, logs to console. Used for tests.

    Args:
        log (Logger)
        close_plugin_name (Optional[str]): Name of plugin with responsibility
            to close application.
    """

    # Error exit as soon as any error occurs.
    error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"

    close_plugin = find_close_plugin(close_plugin_name, log)

    for result in pyblish.util.publish_iter():
        for record in result["records"]:
            # Why do we log again? pyblish logger is logging to stdout...
            log.info("{}: {}".format(result["plugin"].label, record.msg))

        if not result["error"]:
            continue

        # QUESTION We don't break on error?
        error_message = error_format.format(**result)
        log.error(error_message)
        if close_plugin:  # close host app explicitly after error
            context = pyblish.api.Context()
            close_plugin().process(context)


def get_webpublish_conn():
    """Get connection to OP 'webpublishes' collection."""
    mongo_client = OpenPypeMongoConnection.get_mongo_client()
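`publish_in_test` builds its error message with `error_format.format(**result)`, so fields such as `{plugin.__name__}` and `{error.traceback}` are attribute lookups on the objects stored in the pyblish result mapping. A tiny stand-in demonstration (the classes below are fakes; real values come from pyblish results):

class CollectExample:
    """Stand-in plugin class used only for this demonstration."""


class _ErrorStub(Exception):
    # Hypothetical stand-in for the formatted traceback that the publish
    # result carries on its error object.
    traceback = "Traceback (most recent call last): ..."


error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
result = {"plugin": CollectExample, "error": _ErrorStub("file is missing")}

print(error_format.format(**result))
# Failed CollectExample: file is missing -- Traceback (most recent call last): ...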
@@ -6,7 +6,7 @@ import pyblish.util
 from openpype.lib import Logger
 from openpype.lib.applications import (
     ApplicationManager,
-    get_app_environments_for_context,
+    LaunchTypes,
 )
 from openpype.pipeline import install_host
 from openpype.hosts.webpublisher.api import WebpublisherHost

@@ -156,22 +156,31 @@ def cli_publish_from_app(
     found_variant_key = find_variant_key(application_manager, host_name)
     app_name = "{}/{}".format(host_name, found_variant_key)

+    data = {
+        "last_workfile_path": workfile_path,
+        "start_last_workfile": True,
+        "project_name": project_name,
+        "asset_name": asset_name,
+        "task_name": task_name,
+        "launch_type": LaunchTypes.automated,
+    }
+    launch_context = application_manager.create_launch_context(
+        app_name, **data)
+    launch_context.run_prelaunch_hooks()
+
-    # must have for proper launch of app
-    env = get_app_environments_for_context(
-        project_name,
-        asset_name,
-        task_name,
-        app_name
-    )
+    env = launch_context.env
     print("env:: {}".format(env))
+    env["OPENPYPE_PUBLISH_DATA"] = batch_path
+    # must pass identifier to update log lines for a batch
+    env["BATCH_LOG_ID"] = str(_id)
+    env["HEADLESS_PUBLISH"] = 'true' # to use in app lib
+    env["USER_EMAIL"] = user_email
+
     os.environ.update(env)

-    os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
-    # must pass identifier to update log lines for a batch
-    os.environ["BATCH_LOG_ID"] = str(_id)
-    os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib
-    os.environ["USER_EMAIL"] = user_email

+    # Why is this here? Registered host in this process does not affect
+    # regitered host in launched process.
     pyblish.api.register_host(host_name)
     if targets:
         if isinstance(targets, str):

@@ -184,15 +193,7 @@ def cli_publish_from_app(
         os.environ["PYBLISH_TARGETS"] = os.pathsep.join(
             set(current_targets))

-    data = {
-        "last_workfile_path": workfile_path,
-        "start_last_workfile": True,
-        "project_name": project_name,
-        "asset_name": asset_name,
-        "task_name": task_name
-    }
-
-    launched_app = application_manager.launch(app_name, **data)
+    launched_app = application_manager.launch_with_context(launch_context)

     timeout = get_timeout(project_name, host_name, task_type)

@@ -5,11 +5,11 @@
 import sys
 import os
 import site
+from openpype import PACKAGE_DIR

 # Add Python version specific vendor folder
 python_version_dir = os.path.join(
-    os.getenv("OPENPYPE_REPOS_ROOT", ""),
-    "openpype", "vendor", "python", "python_{}".format(sys.version[0])
+    PACKAGE_DIR, "vendor", "python", "python_{}".format(sys.version[0])
 )
 # Prepend path in sys paths
 sys.path.insert(0, python_version_dir)

@@ -55,11 +55,13 @@ from .env_tools import (

 from .terminal import Terminal
 from .execute import (
+    get_ayon_launcher_args,
     get_openpype_execute_args,
     get_linux_launcher_args,
     execute,
     run_subprocess,
     run_detached_process,
+    run_ayon_launcher_process,
     run_openpype_process,
     clean_envs_for_openpype_process,
     path_to_subprocess_arg,

@@ -175,11 +177,13 @@ __all__ = [
     "emit_event",
     "register_event_callback",

+    "get_ayon_launcher_args",
     "get_openpype_execute_args",
     "get_linux_launcher_args",
     "execute",
     "run_subprocess",
     "run_detached_process",
+    "run_ayon_launcher_process",
     "run_openpype_process",
     "clean_envs_for_openpype_process",
     "path_to_subprocess_arg",

@@ -11,10 +11,7 @@ from abc import ABCMeta, abstractmethod

 import six

-from openpype.client import (
-    get_project,
-    get_asset_by_name,
-)
+from openpype import AYON_SERVER_ENABLED, PACKAGE_DIR
 from openpype.settings import (
     get_system_settings,
     get_project_settings,

@@ -46,6 +43,25 @@ CUSTOM_LAUNCH_APP_GROUPS = {
 }


+class LaunchTypes:
+    """Launch types are filters for pre/post-launch hooks.
+
+    Please use these variables in case they'll change values.
+    """
+
+    # Local launch - application is launched on local machine
+    local = "local"
+    # Farm render job - application is on farm
+    farm_render = "farm-render"
+    # Farm publish job - integration post-render job
+    farm_publish = "farm-publish"
+    # Remote launch - application is launched on remote machine from which
+    # can be started publishing
+    remote = "remote"
+    # Automated launch - application is launched with automated publishing
+    automated = "automated"
+
+
 def parse_environments(env_data, env_group=None, platform_name=None):
     """Parse environment values from settings byt group and platform.

@@ -482,6 +498,42 @@ class ApplicationManager:
             break
         return output

+    def create_launch_context(self, app_name, **data):
+        """Prepare launch context for application.
+
+        Args:
+            app_name (str): Name of application that should be launched.
+            **data (Any): Any additional data. Data may be used during
+                preparation to store objects usable in multiple places.
+
+        Returns:
+            ApplicationLaunchContext: Launch context for application.
+
+        Raises:
+            ApplicationNotFound: Application was not found by entered name.
+        """
+
+        app = self.applications.get(app_name)
+        if not app:
+            raise ApplicationNotFound(app_name)
+
+        executable = app.find_executable()
+
+        return ApplicationLaunchContext(
+            app, executable, **data
+        )
+
+    def launch_with_context(self, launch_context):
+        """Launch application using existing launch context.
+
+        Args:
+            launch_context (ApplicationLaunchContext): Prepared launch
+                context.
+        """
+
+        if not launch_context.executable:
+            raise ApplictionExecutableNotFound(launch_context.application)
+        return launch_context.launch()
+
     def launch(self, app_name, **data):
         """Launch procedure.

@@ -502,18 +554,10 @@ class ApplicationManager:
                 failed. Exception should contain explanation message,
                 traceback should not be needed.
         """
-        app = self.applications.get(app_name)
-        if not app:
-            raise ApplicationNotFound(app_name)
-
-        executable = app.find_executable()
-        if not executable:
-            raise ApplictionExecutableNotFound(app)
+        context = self.create_launch_context(app_name, **data)
+        return self.launch_with_context(context)

-        context = ApplicationLaunchContext(
-            app, executable, **data
-        )
-        return context.launch()


 class EnvironmentToolGroup:

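Editor's note: the manager now splits preparation from process creation, so `create_launch_context` builds an `ApplicationLaunchContext`, `launch_with_context` starts it, and `launch` simply chains the two. A minimal sketch of the new flow, not part of the commit; the application name and context values are placeholders:

from openpype.lib.applications import ApplicationManager, LaunchTypes

manager = ApplicationManager()

# Keyword data is forwarded to the launch context, as cli_publish_from_app does.
launch_context = manager.create_launch_context(
    "maya/2023",                      # hypothetical application full name
    project_name="demo_project",      # placeholder context values
    asset_name="sh010",
    task_name="animation",
    launch_type=LaunchTypes.local,
)

# Prelaunch hooks can be run on their own, e.g. just to inspect environments...
launch_context.run_prelaunch_hooks()
print(launch_context.env.get("AVALON_PROJECT"))

# ...and the same context is then used to start the process.
process = manager.launch_with_context(launch_context)
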
@@ -735,13 +779,17 @@ class LaunchHook:
     # Order of prelaunch hook, will be executed as last if set to None.
     order = None
     # List of host implementations, skipped if empty.
-    hosts = []
-    # List of application groups
-    app_groups = []
-    # List of specific application names
-    app_names = []
-    # List of platform availability, skipped if empty.
-    platforms = []
+    hosts = set()
+    # Set of application groups
+    app_groups = set()
+    # Set of specific application names
+    app_names = set()
+    # Set of platform availability
+    platforms = set()
+    # Set of launch types for which is available
+    # - if empty then is available for all launch types
+    # - by default has 'local' which is most common reason for launc hooks
+    launch_types = {LaunchTypes.local}

     def __init__(self, launch_context):
         """Constructor of launch hook.

@@ -789,6 +837,10 @@ class LaunchHook:
             if launch_context.app_name not in cls.app_names:
                 return False

+        if cls.launch_types:
+            if launch_context.launch_type not in cls.launch_types:
+                return False
+
         return True

     @property

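Editor's note: hooks now declare their filters as sets, and the new `launch_types` filter is checked during hook filtering, so a hook only runs for the launch types it lists. A hypothetical prelaunch hook limited to local and automated launches might look like this (the hook body and environment key are illustrative, not part of the commit):

from openpype.lib.applications import PreLaunchHook, LaunchTypes


class AddStudioFlagHook(PreLaunchHook):
    """Hypothetical hook that only runs for local and automated launches."""

    order = 10
    hosts = {"maya"}
    launch_types = {LaunchTypes.local, LaunchTypes.automated}

    def execute(self):
        # Prelaunch hooks mutate the context environment before launch.
        self.launch_context.env["STUDIO_CUSTOM_FLAG"] = "1"
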
@@ -858,9 +910,9 @@ class PostLaunchHook(LaunchHook):
 class ApplicationLaunchContext:
     """Context of launching application.

-    Main purpose of context is to prepare launch arguments and keyword arguments
-    for new process. Most important part of keyword arguments preparations
-    are environment variables.
+    Main purpose of context is to prepare launch arguments and keyword
+    arguments for new process. Most important part of keyword arguments
+    preparations are environment variables.

     During the whole process is possible to use `data` attribute to store
     object usable in multiple places.

@@ -873,14 +925,30 @@ class ApplicationLaunchContext:
     insert argument between `nuke.exe` and `--NukeX`. To keep them together
     it is better to wrap them in another list: `[["nuke.exe", "--NukeX"]]`.

+    Notes:
+        It is possible to use launch context only to prepare environment
+        variables. In that case `executable` may be None and can be used
+        'run_prelaunch_hooks' method to run prelaunch hooks which prepare
+        them.
+
     Args:
         application (Application): Application definition.
         executable (ApplicationExecutable): Object with path to executable.
         env_group (Optional[str]): Environment variable group. If not set
             'DEFAULT_ENV_SUBGROUP' is used.
+        launch_type (Optional[str]): Launch type. If not set 'local' is used.
         **data (dict): Any additional data. Data may be used during
             preparation to store objects usable in multiple places.
     """

-    def __init__(self, application, executable, env_group=None, **data):
+    def __init__(
+        self,
+        application,
+        executable,
+        env_group=None,
+        launch_type=None,
+        **data
+    ):
         from openpype.modules import ModulesManager

         # Application object

@@ -895,6 +963,10 @@ class ApplicationLaunchContext:

         self.executable = executable

+        if launch_type is None:
+            launch_type = LaunchTypes.local
+        self.launch_type = launch_type
+
         if env_group is None:
             env_group = DEFAULT_ENV_SUBGROUP

@@ -902,8 +974,11 @@ class ApplicationLaunchContext:

         self.data = dict(data)

+        launch_args = []
+        if executable is not None:
+            launch_args = executable.as_args()
         # subprocess.Popen launch arguments (first argument in constructor)
-        self.launch_args = executable.as_args()
+        self.launch_args = launch_args
         self.launch_args.extend(application.arguments)
         if self.data.get("app_args"):
             self.launch_args.extend(self.data.pop("app_args"))

@@ -945,6 +1020,7 @@ class ApplicationLaunchContext:
         self.postlaunch_hooks = None

         self.process = None
+        self._prelaunch_hooks_executed = False

     @property
     def env(self):

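Editor's note: the added Notes section points out that a context can now be created without an executable purely to resolve environment variables. A small sketch of that pattern (application name and context data are placeholders, not part of the commit):

from openpype.lib.applications import (
    ApplicationLaunchContext,
    ApplicationManager,
)

manager = ApplicationManager()
app = manager.applications["maya/2023"]   # hypothetical application full name

# With executable=None the context cannot launch a process, but prelaunch
# hooks can still prepare the environment.
context = ApplicationLaunchContext(
    app,
    executable=None,
    project_name="demo_project",          # placeholder context data
    asset_name="sh010",
    task_name="animation",
)
context.run_prelaunch_hooks()
env = context.env
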
@@ -1214,6 +1290,27 @@ class ApplicationLaunchContext:
             # Return process which is already terminated
             return process

+    def run_prelaunch_hooks(self):
+        """Run prelaunch hooks.
+
+        This method will be executed only once, any future calls will skip
+        the processing.
+        """
+
+        if self._prelaunch_hooks_executed:
+            self.log.warning("Prelaunch hooks were already executed.")
+            return
+        # Discover launch hooks
+        self.discover_launch_hooks()
+
+        # Execute prelaunch hooks
+        for prelaunch_hook in self.prelaunch_hooks:
+            self.log.debug("Executing prelaunch hook: {}".format(
+                str(prelaunch_hook.__class__.__name__)
+            ))
+            prelaunch_hook.execute()
+        self._prelaunch_hooks_executed = True
+
     def launch(self):
         """Collect data for new process and then create it.

@@ -1226,15 +1323,8 @@ class ApplicationLaunchContext:
             self.log.warning("Application was already launched.")
             return

-        # Discover launch hooks
-        self.discover_launch_hooks()
-
-        # Execute prelaunch hooks
-        for prelaunch_hook in self.prelaunch_hooks:
-            self.log.debug("Executing prelaunch hook: {}".format(
-                str(prelaunch_hook.__class__.__name__)
-            ))
-            prelaunch_hook.execute()
+        if not self._prelaunch_hooks_executed:
+            self.run_prelaunch_hooks()

         self.log.debug("All prelaunch hook executed. Starting new process.")

@@ -1352,6 +1442,7 @@ def get_app_environments_for_context(
     task_name,
     app_name,
     env_group=None,
+    launch_type=None,
     env=None,
     modules_manager=None
 ):

@@ -1362,54 +1453,33 @@ def get_app_environments_for_context(
         task_name (str): Name of task.
         app_name (str): Name of application that is launched and can be found
             by ApplicationManager.
-        env (dict): Initial environment variables. `os.environ` is used when
-            not passed.
-        modules_manager (ModulesManager): Initialized modules manager.
+        env_group (Optional[str]): Name of environment group. If not passed
+            default group is used.
+        launch_type (Optional[str]): Type for which prelaunch hooks are
+            executed.
+        env (Optional[dict[str, str]]): Initial environment variables.
+            `os.environ` is used when not passed.
+        modules_manager (Optional[ModulesManager]): Initialized modules
+            manager.

     Returns:
         dict: Environments for passed context and application.
     """

-    from openpype.modules import ModulesManager
-    from openpype.pipeline import Anatomy
-    from openpype.lib.openpype_version import is_running_staging
-
-    # Project document
-    project_doc = get_project(project_name)
-    asset_doc = get_asset_by_name(project_name, asset_name)
-
-    if modules_manager is None:
-        modules_manager = ModulesManager()
-
-    # Prepare app object which can be obtained only from ApplciationManager
+    # Prepare app object which can be obtained only from ApplicationManager
     app_manager = ApplicationManager()
-    app = app_manager.applications[app_name]
-
-    # Project's anatomy
-    anatomy = Anatomy(project_name)
-
-    data = EnvironmentPrepData({
-        "project_name": project_name,
-        "asset_name": asset_name,
-        "task_name": task_name,
-
-        "app": app,
-
-        "project_doc": project_doc,
-        "asset_doc": asset_doc,
-
-        "anatomy": anatomy,
-
-        "env": env
-    })
-    data["env"].update(anatomy.root_environments())
-    if is_running_staging():
-        data["env"]["OPENPYPE_IS_STAGING"] = "1"
-
-    prepare_app_environments(data, env_group, modules_manager)
-    prepare_context_environments(data, env_group, modules_manager)
-
-    return data["env"]
+    context = app_manager.create_launch_context(
+        app_name,
+        project_name=project_name,
+        asset_name=asset_name,
+        task_name=task_name,
+        env_group=env_group,
+        launch_type=launch_type,
+        env=env,
+        modules_manager=modules_manager,
+    )
+    context.run_prelaunch_hooks()
+    return context.env


 def _merge_env(env, current_env):

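Editor's note: `get_app_environments_for_context` is now a thin wrapper around a launch context, with the new `launch_type` argument deciding which prelaunch hooks run. A usage sketch, not part of the commit; project, asset, task and application names are placeholders:

from openpype.lib.applications import (
    get_app_environments_for_context,
    LaunchTypes,
)

env = get_app_environments_for_context(
    "demo_project",
    "sh010",
    "animation",
    "maya/2023",
    launch_type=LaunchTypes.farm_render,
)
print(env.get("AVALON_WORKDIR"))
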
@@ -1435,10 +1505,8 @@ def _add_python_version_paths(app, env, logger, modules_manager):
         return

     # Add Python 2/3 modules
-    openpype_root = os.getenv("OPENPYPE_REPOS_ROOT")
     python_vendor_dir = os.path.join(
-        openpype_root,
-        "openpype",
+        PACKAGE_DIR,
         "vendor",
         "python"
     )

@@ -1640,11 +1708,7 @@ def prepare_context_environments(data, env_group=None, modules_manager=None):
     project_doc = data["project_doc"]
     asset_doc = data["asset_doc"]
     task_name = data["task_name"]
-    if (
-        not project_doc
-        or not asset_doc
-        or not task_name
-    ):
+    if not project_doc:
         log.info(
             "Skipping context environments preparation."
             " Launch context does not contain required data."

@@ -1657,18 +1721,16 @@ def prepare_context_environments(data, env_group=None, modules_manager=None):
     system_settings = get_system_settings()
     data["project_settings"] = project_settings
     data["system_settings"] = system_settings
-    # Apply project specific environments on current env value
-    apply_project_environments_value(
-        project_name, data["env"], project_settings, env_group
-    )

     app = data["app"]
     context_env = {
         "AVALON_PROJECT": project_doc["name"],
-        "AVALON_ASSET": asset_doc["name"],
-        "AVALON_TASK": task_name,
         "AVALON_APP_NAME": app.full_name
     }
+    if asset_doc:
+        context_env["AVALON_ASSET"] = asset_doc["name"]
+    if task_name:
+        context_env["AVALON_TASK"] = task_name

     log.debug(
         "Context environments set:\n{}".format(

@@ -1676,9 +1738,25 @@ def prepare_context_environments(data, env_group=None, modules_manager=None):
         )
     )
     data["env"].update(context_env)

+    # Apply project specific environments on current env value
+    # - apply them once the context environments are set
+    apply_project_environments_value(
+        project_name, data["env"], project_settings, env_group
+    )
+
     if not app.is_host:
         return

+    data["env"]["AVALON_APP"] = app.host_name
+
+    if not asset_doc or not task_name:
+        # QUESTION replace with log.info and skip workfile discovery?
+        # - technically it should be possible to launch host without context
+        raise ApplicationLaunchFailed(
+            "Host launch require asset and task context."
+        )
+
     workdir_data = get_template_data(
         project_doc, asset_doc, task_name, app.host_name, system_settings
     )

@@ -1716,7 +1794,6 @@ def prepare_context_environments(data, env_group=None, modules_manager=None):
                 "Couldn't create workdir because: {}".format(str(exc))
             )

-    data["env"]["AVALON_APP"] = app.host_name
     data["env"]["AVALON_WORKDIR"] = workdir

     _prepare_last_workfile(data, workdir, modules_manager)

@@ -1950,17 +2027,28 @@ def get_non_python_host_kwargs(kwargs, allow_console=True):
         allow_console (bool): use False for inner Popen opening app itself or
             it will open additional console (at least for Harmony)
     """

     if kwargs is None:
         kwargs = {}

     if platform.system().lower() != "windows":
         return kwargs

-    executable_path = os.environ.get("OPENPYPE_EXECUTABLE")
+    if AYON_SERVER_ENABLED:
+        executable_path = os.environ.get("AYON_EXECUTABLE")
+    else:
+        executable_path = os.environ.get("OPENPYPE_EXECUTABLE")
+
     executable_filename = ""
     if executable_path:
         executable_filename = os.path.basename(executable_path)
-    if "openpype_gui" in executable_filename:
+
+    if AYON_SERVER_ENABLED:
+        is_gui_executable = "ayon_console" not in executable_filename
+    else:
+        is_gui_executable = "openpype_gui" in executable_filename
+
+    if is_gui_executable:
         kwargs.update({
             "creationflags": subprocess.CREATE_NO_WINDOW,
             "stdout": subprocess.DEVNULL,

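Editor's note: on an AYON-enabled install the GUI check now keys off the `ayon_console` executable name instead of `openpype_gui`. The helper is meant to be fed into `subprocess.Popen` when starting non-Python hosts; a rough sketch, not part of the commit, with a purely illustrative host executable:

import subprocess
from openpype.lib.applications import get_non_python_host_kwargs

popen_kwargs = get_non_python_host_kwargs({})
# On Windows, when running from the GUI launcher, this adds CREATE_NO_WINDOW
# and suppresses the process output; on other platforms kwargs pass through
# unchanged.
subprocess.Popen(["HarmonyPremium.exe", "scene.xstage"], **popen_kwargs)
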
@@ -164,12 +164,19 @@ def run_subprocess(*args, **kwargs):
     return full_output


-def clean_envs_for_openpype_process(env=None):
-    """Modify environments that may affect OpenPype process.
+def clean_envs_for_ayon_process(env=None):
+    """Modify environments that may affect ayon-launcher process.

     Main reason to implement this function is to pop PYTHONPATH which may be
     affected by in-host environments.
+
+    Args:
+        env (Optional[dict[str, str]]): Environment variables to modify.
+
+    Returns:
+        dict[str, str]: Environment variables for ayon process.
     """

     if env is None:
         env = os.environ

@@ -181,6 +188,64 @@ def clean_envs_for_openpype_process(env=None):
     return env


+def clean_envs_for_openpype_process(env=None):
+    """Modify environments that may affect OpenPype process.
+
+    Main reason to implement this function is to pop PYTHONPATH which may be
+    affected by in-host environments.
+    """
+
+    if AYON_SERVER_ENABLED:
+        return clean_envs_for_ayon_process(env=env)
+
+    if env is None:
+        env = os.environ
+
+    # Exclude some environment variables from a copy of the environment
+    env = env.copy()
+    for key in ["PYTHONPATH", "PYTHONHOME"]:
+        env.pop(key, None)
+
+    return env
+
+
+def run_ayon_launcher_process(*args, **kwargs):
+    """Execute OpenPype process with passed arguments and wait.
+
+    Wrapper for 'run_process' which prepends OpenPype executable arguments
+    before passed arguments and define environments if are not passed.
+
+    Values from 'os.environ' are used for environments if are not passed.
+    They are cleaned using 'clean_envs_for_openpype_process' function.
+
+    Example:
+        ```
+        run_ayon_process("run", "<path to .py script>")
+        ```
+
+    Args:
+        *args (str): ayon-launcher cli arguments.
+        **kwargs (Any): Keyword arguments for subprocess.Popen.
+
+    Returns:
+        str: Full output of subprocess concatenated stdout and stderr.
+    """
+
+    args = get_ayon_launcher_args(*args)
+    env = kwargs.pop("env", None)
+    # Keep env untouched if are passed and not empty
+    if not env:
+        # Skip envs that can affect OpenPype process
+        # - fill more if you find more
+        env = clean_envs_for_openpype_process(os.environ)
+
+    # Only keep OpenPype version if we are running from build.
+    if not is_running_from_build():
+        env.pop("OPENPYPE_VERSION", None)
+
+    return run_subprocess(args, env=env, **kwargs)
+
+
 def run_openpype_process(*args, **kwargs):
     """Execute OpenPype process with passed arguments and wait.

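Editor's note: `run_ayon_launcher_process` builds its argument list with `get_ayon_launcher_args`, strips host-polluted variables from the environment and waits for the subprocess. A small usage sketch, not part of the commit; the CLI argument is illustrative and the call assumes the process runs under ayon-launcher (i.e. `AYON_EXECUTABLE` is set):

from openpype.lib import run_ayon_launcher_process

# Spawns "<ayon executable> --help" with a cleaned environment and returns
# the combined stdout/stderr of the finished process.
output = run_ayon_launcher_process("--help")
print(output)
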
@@ -191,14 +256,16 @@ def run_openpype_process(*args, **kwargs):
     They are cleaned using 'clean_envs_for_openpype_process' function.

     Example:
-        ```
-        run_detached_process("run", "<path to .py script>")
-        ```
+        >>> run_openpype_process("version")

     Args:
         *args (tuple): OpenPype cli arguments.
-        **kwargs (dict): Keyword arguments for for subprocess.Popen.
+        **kwargs (dict): Keyword arguments for subprocess.Popen.
     """

+    if AYON_SERVER_ENABLED:
+        return run_ayon_launcher_process(*args, **kwargs)
+
     args = get_openpype_execute_args(*args)
     env = kwargs.pop("env", None)
     # Keep env untouched if are passed and not empty

@@ -221,18 +288,18 @@ def run_detached_process(args, **kwargs):
     They are cleaned using 'clean_envs_for_openpype_process' function.

     Example:
-        ```
-        run_detached_openpype_process("run", "<path to .py script>")
-        ```
+        >>> run_detached_process("run", "./path_to.py")

     Args:
         *args (tuple): OpenPype cli arguments.
-        **kwargs (dict): Keyword arguments for for subprocess.Popen.
+        **kwargs (dict): Keyword arguments for subprocess.Popen.

     Returns:
         subprocess.Popen: Pointer to launched process but it is possible that
             launched process is already killed (on linux).
     """

     env = kwargs.pop("env", None)
     # Keep env untouched if are passed and not empty
     if not env:

@@ -296,6 +363,39 @@ def path_to_subprocess_arg(path):
     return subprocess.list2cmdline([path])


+def get_ayon_launcher_args(*args):
+    """Arguments to run ayon-launcher process.
+
+    Arguments for subprocess when need to spawn new pype process. Which may be
+    needed when new python process for pype scripts must be executed in build
+    pype.
+
+    Reasons:
+        Ayon-launcher started from code has different executable set to
+        virtual env python and must have path to script as first argument
+        which is not needed for built application.
+
+    Args:
+        *args (str): Any arguments that will be added after executables.
+
+    Returns:
+        list[str]: List of arguments to run ayon-launcher process.
+    """
+
+    executable = os.environ["AYON_EXECUTABLE"]
+    launch_args = [executable]
+
+    executable_filename = os.path.basename(executable)
+    if "python" in executable_filename.lower():
+        filepath = os.path.join(os.environ["AYON_ROOT"], "start.py")
+        launch_args.append(filepath)
+
+    if args:
+        launch_args.extend(args)
+
+    return launch_args
+
+
 def get_openpype_execute_args(*args):
     """Arguments to run pype command.

@@ -311,17 +411,17 @@ def get_openpype_execute_args(*args):
     It is possible to pass any arguments that will be added after pype
     executables.
     """

+    if AYON_SERVER_ENABLED:
+        return get_ayon_launcher_args(*args)
+
     executable = os.environ["OPENPYPE_EXECUTABLE"]
     launch_args = [executable]

     executable_filename = os.path.basename(executable)
     if "python" in executable_filename.lower():
-        filename = "start.py"
-        if AYON_SERVER_ENABLED:
-            filename = "ayon_start.py"
-        launch_args.append(
-            os.path.join(os.environ["OPENPYPE_ROOT"], filename)
-        )
+        filepath = os.path.join(os.environ["OPENPYPE_ROOT"], "start.py")
+        launch_args.append(filepath)

     if args:
         launch_args.extend(args)

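Editor's note: `get_ayon_launcher_args` only injects the `start.py` path when `AYON_EXECUTABLE` points at a Python interpreter, i.e. when running from code; a built launcher is used directly. A rough illustration, not part of the commit; the paths are examples, not actual values:

from openpype.lib import get_ayon_launcher_args

# From a build:   ["/opt/ayon/ayon", "run", "script.py"]
# From source:    ["/usr/bin/python3", "/path/to/ayon-launcher/start.py",
#                  "run", "script.py"]
args = get_ayon_launcher_args("run", "script.py")
print(args)
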
@@ -338,6 +438,9 @@ def get_linux_launcher_args(*args):
     It is possible that this function is used in OpenPype build which does
     not have yet the new executable. In that case 'None' is returned.

+    Todos:
+        Replace by script in scripts for ayon-launcher.
+
     Args:
         args (iterable): List of additional arguments added after executable
             argument.

@@ -346,19 +449,24 @@ def get_linux_launcher_args(*args):
         list: Executables with possible positional argument to script when
             called from code.
     """
-    filename = "app_launcher"
-    openpype_executable = os.environ["OPENPYPE_EXECUTABLE"]
-
-    executable_filename = os.path.basename(openpype_executable)
+    filename = "app_launcher"
+    if AYON_SERVER_ENABLED:
+        executable = os.environ["AYON_EXECUTABLE"]
+    else:
+        executable = os.environ["OPENPYPE_EXECUTABLE"]
+
+    executable_filename = os.path.basename(executable)
     if "python" in executable_filename.lower():
-        script_path = os.path.join(
-            os.environ["OPENPYPE_ROOT"],
-            "{}.py".format(filename)
-        )
-        launch_args = [openpype_executable, script_path]
+        if AYON_SERVER_ENABLED:
+            root = os.environ["AYON_ROOT"]
+        else:
+            root = os.environ["OPENPYPE_ROOT"]
+        script_path = os.path.join(root, "{}.py".format(filename))
+        launch_args = [executable, script_path]
     else:
         new_executable = os.path.join(
-            os.path.dirname(openpype_executable),
+            os.path.dirname(executable),
             filename
         )
         executable_path = find_executable(new_executable)

@@ -26,8 +26,25 @@ def get_openpype_version():
     return openpype.version.__version__


+def get_ayon_launcher_version():
+    version_filepath = os.path.join(
+        os.environ["AYON_ROOT"],
+        "version.py"
+    )
+    if not os.path.exists(version_filepath):
+        return None
+    content = {}
+    with open(version_filepath, "r") as stream:
+        exec(stream.read(), content)
+    return content["__version__"]
+
+
 def get_build_version():
     """OpenPype version of build."""

+    if AYON_SERVER_ENABLED:
+        return get_ayon_launcher_version()
+
     # Return OpenPype version if is running from code
     if not is_running_from_build():
         return get_openpype_version()

@@ -51,7 +68,11 @@ def is_running_from_build():
     Returns:
         bool: True if running from build.
     """
-    executable_path = os.environ["OPENPYPE_EXECUTABLE"]
+
+    if AYON_SERVER_ENABLED:
+        executable_path = os.environ["AYON_EXECUTABLE"]
+    else:
+        executable_path = os.environ["OPENPYPE_EXECUTABLE"]
     executable_filename = os.path.basename(executable_path)
     if "python" in executable_filename.lower():
         return False

@@ -59,6 +80,8 @@ def is_running_from_build():


 def is_staging_enabled():
+    if AYON_SERVER_ENABLED:
+        return os.getenv("AYON_USE_STAGING") == "1"
     return os.environ.get("OPENPYPE_USE_STAGING") == "1"

@@ -334,6 +334,9 @@ def get_usd_master_path(asset, subset, representation):
             "name": project_name,
             "code": project_doc.get("data", {}).get("code")
         },
+        "folder": {
+            "name": asset_doc["name"],
+        },
         "asset": asset_doc["name"],
         "subset": subset,
         "representation": representation,

Some files were not shown because too many files have changed in this diff.