mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-02 00:44:52 +01:00)
Merge branch 'develop' into enhancement/AY-970_abc-options-for-Pointcache-Animation-family
This commit is contained in:
commit 14bf4415bf
326 changed files with 4471 additions and 3396 deletions
@@ -0,0 +1,59 @@
from .constants import (
    APPLICATIONS_ADDON_ROOT,
    DEFAULT_ENV_SUBGROUP,
    PLATFORM_NAMES,
)
from .exceptions import (
    ApplicationNotFound,
    ApplicationExecutableNotFound,
    ApplicationLaunchFailed,
    MissingRequiredKey,
)
from .defs import (
    LaunchTypes,
    ApplicationExecutable,
    UndefinedApplicationExecutable,
    ApplicationGroup,
    Application,
    EnvironmentToolGroup,
    EnvironmentTool,
)
from .hooks import (
    LaunchHook,
    PreLaunchHook,
    PostLaunchHook,
)
from .manager import (
    ApplicationManager,
    ApplicationLaunchContext,
)
from .addon import ApplicationsAddon


__all__ = (
    "APPLICATIONS_ADDON_ROOT",
    "DEFAULT_ENV_SUBGROUP",
    "PLATFORM_NAMES",

    "ApplicationNotFound",
    "ApplicationExecutableNotFound",
    "ApplicationLaunchFailed",
    "MissingRequiredKey",

    "LaunchTypes",
    "ApplicationExecutable",
    "UndefinedApplicationExecutable",
    "ApplicationGroup",
    "Application",
    "EnvironmentToolGroup",
    "EnvironmentTool",

    "LaunchHook",
    "PreLaunchHook",
    "PostLaunchHook",

    "ApplicationManager",
    "ApplicationLaunchContext",

    "ApplicationsAddon",
)
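
The package `__init__` above re-exports the addon's public API, so client code can import everything from `ayon_applications` directly. A minimal usage sketch, not part of the commit, assuming the client package is importable inside an AYON process (the printed variants are whatever the studio settings define):

from ayon_applications import ApplicationManager

# Build a manager from studio settings and list discovered application
# variants by their full names (e.g. "maya/2024").
manager = ApplicationManager()
for full_name, app in manager.applications.items():
    print(full_name, "enabled:", app.enabled)
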
173 server_addon/applications/client/ayon_applications/addon.py Normal file

@@ -0,0 +1,173 @@
import os
import json

from ayon_core.addon import AYONAddon, IPluginPaths, click_wrap

from .constants import APPLICATIONS_ADDON_ROOT
from .defs import LaunchTypes
from .manager import ApplicationManager


class ApplicationsAddon(AYONAddon, IPluginPaths):
    name = "applications"

    def initialize(self, settings):
        # TODO remove when addon is removed from ayon-core
        self.enabled = self.name in settings

    def get_app_environments_for_context(
        self,
        project_name,
        folder_path,
        task_name,
        full_app_name,
        env_group=None,
        launch_type=None,
        env=None,
    ):
        """Calculate environment variables for launch context.

        Args:
            project_name (str): Project name.
            folder_path (str): Folder path.
            task_name (str): Task name.
            full_app_name (str): Full application name.
            env_group (Optional[str]): Environment group.
            launch_type (Optional[str]): Launch type.
            env (Optional[dict[str, str]]): Environment variables to update.

        Returns:
            dict[str, str]: Environment variables for context.

        """
        from ayon_applications.utils import get_app_environments_for_context

        if not full_app_name:
            return {}

        return get_app_environments_for_context(
            project_name,
            folder_path,
            task_name,
            full_app_name,
            env_group=env_group,
            launch_type=launch_type,
            env=env,
            addons_manager=self.manager
        )

    def get_farm_publish_environment_variables(
        self,
        project_name,
        folder_path,
        task_name,
        full_app_name=None,
        env_group=None,
    ):
        """Calculate environment variables for farm publish.

        Args:
            project_name (str): Project name.
            folder_path (str): Folder path.
            task_name (str): Task name.
            env_group (Optional[str]): Environment group.
            full_app_name (Optional[str]): Full application name. Value from
                environment variable 'AYON_APP_NAME' is used if 'None' is
                passed.

        Returns:
            dict[str, str]: Environment variables for farm publish.

        """
        if full_app_name is None:
            full_app_name = os.getenv("AYON_APP_NAME")

        return self.get_app_environments_for_context(
            project_name,
            folder_path,
            task_name,
            full_app_name,
            env_group=env_group,
            launch_type=LaunchTypes.farm_publish
        )

    def get_applications_manager(self, settings=None):
        """Get applications manager.

        Args:
            settings (Optional[dict]): Studio/project settings.

        Returns:
            ApplicationManager: Applications manager.

        """
        return ApplicationManager(settings)

    def get_plugin_paths(self):
        return {
            "publish": [
                os.path.join(APPLICATIONS_ADDON_ROOT, "plugins", "publish")
            ]
        }

    # --- CLI ---
    def cli(self, addon_click_group):
        main_group = click_wrap.group(
            self._cli_main, name=self.name, help="Applications addon"
        )
        (
            main_group.command(
                self._cli_extract_environments,
                name="extractenvironments",
                help=(
                    "Extract environment variables for context into json file"
                )
            )
            .argument("output_json_path")
            .option("--project", help="Project name", default=None)
            .option("--folder", help="Folder path", default=None)
            .option("--task", help="Task name", default=None)
            .option("--app", help="Application name", default=None)
            .option(
                "--envgroup",
                help="Environment group (e.g. \"farm\")",
                default=None
            )
        )
        # Convert main command to click object and add it to parent group
        addon_click_group.add_command(
            main_group.to_click_obj()
        )

    def _cli_main(self):
        pass

    def _cli_extract_environments(
        self, output_json_path, project, folder, task, app, envgroup
    ):
        """Produces json file with environment based on project and app.

        Called by farm integration to propagate environment into farm jobs.

        Args:
            output_json_path (str): Output json file path.
            project (str): Project name.
            folder (str): Folder path.
            task (str): Task name.
            app (str): Full application name e.g. 'maya/2024'.
            envgroup (str): Environment group.

        """
        if all((project, folder, task, app)):
            env = self.get_farm_publish_environment_variables(
                project, folder, task, app, env_group=envgroup,
            )
        else:
            env = os.environ.copy()

        output_dir = os.path.dirname(output_json_path)
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)

        with open(output_json_path, "w") as file_stream:
            json.dump(env, file_stream, indent=4)
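
The `extractenvironments` CLI command above is what farm integrations call to bake context environments into a JSON file, and it is a thin wrapper around `get_app_environments_for_context` from the addon's `utils` module (shown later in this diff). A hedged sketch of the equivalent call from Python, not part of the commit, with placeholder project/folder/task/app values:

from ayon_applications.utils import get_app_environments_for_context

# Same result as the "extractenvironments" command with "--envgroup farm",
# except the dictionary is returned instead of written to a JSON file.
env = get_app_environments_for_context(
    "my_project", "/assets/characters/hero", "modeling", "maya/2024",
    env_group="farm",
)
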

@@ -0,0 +1,6 @@
import os

APPLICATIONS_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))

PLATFORM_NAMES = {"windows", "linux", "darwin"}
DEFAULT_ENV_SUBGROUP = "standard"
404 server_addon/applications/client/ayon_applications/defs.py Normal file

@@ -0,0 +1,404 @@
import os
import platform
import json
import copy

from ayon_core.lib import find_executable


class LaunchTypes:
    """Launch types are filters for pre/post-launch hooks.

    Please use these variables in case they'll change values.
    """

    # Local launch - application is launched on local machine
    local = "local"
    # Farm render job - application is on farm
    farm_render = "farm-render"
    # Farm publish job - integration post-render job
    farm_publish = "farm-publish"
    # Remote launch - application is launched on remote machine from which
    #   publishing can be started
    remote = "remote"
    # Automated launch - application is launched with automated publishing
    automated = "automated"


class ApplicationExecutable:
    """Representation of executable loaded from settings."""

    def __init__(self, executable):
        # Try to format executable with environments
        try:
            executable = executable.format(**os.environ)
        except Exception:
            pass

        # On MacOS check if path to executable exists when it ends with `.app`
        # - it is common that path will lead to "/Applications/Blender" but
        #   real path is "/Applications/Blender.app"
        if platform.system().lower() == "darwin":
            executable = self.macos_executable_prep(executable)

        self.executable_path = executable

    def __str__(self):
        return self.executable_path

    def __repr__(self):
        return "<{}> {}".format(self.__class__.__name__, self.executable_path)

    @staticmethod
    def macos_executable_prep(executable):
        """Try to find full path to executable file.

        Real executable is stored in '*.app/Contents/MacOS/<executable>'.

        Having path to '*.app' gives ability to read its plist info and
        use "CFBundleExecutable" key from plist to know what the "executable"
        is.

        Plist is stored in '*.app/Contents/Info.plist'.

        This is because some '*.app' directories don't have same permissions
        as real executable.
        """
        # Try to find if there is `.app` file
        if not os.path.exists(executable):
            _executable = executable + ".app"
            if os.path.exists(_executable):
                executable = _executable

        # Try to find real executable if executable has `Contents` subfolder
        contents_dir = os.path.join(executable, "Contents")
        if os.path.exists(contents_dir):
            executable_filename = None
            # Load plist file and check for bundle executable
            plist_filepath = os.path.join(contents_dir, "Info.plist")
            if os.path.exists(plist_filepath):
                import plistlib

                if hasattr(plistlib, "load"):
                    with open(plist_filepath, "rb") as stream:
                        parsed_plist = plistlib.load(stream)
                else:
                    parsed_plist = plistlib.readPlist(plist_filepath)
                executable_filename = parsed_plist.get("CFBundleExecutable")

            if executable_filename:
                executable = os.path.join(
                    contents_dir, "MacOS", executable_filename
                )

        return executable

    def as_args(self):
        return [self.executable_path]

    def _realpath(self):
        """Check if path is valid executable path."""
        # Check for executable in PATH
        result = find_executable(self.executable_path)
        if result is not None:
            return result

        # This is not 100% validation but it is better than removing the
        #   ability to launch .bat, .sh or extensionless files
        if os.path.exists(self.executable_path):
            return self.executable_path
        return None

    def exists(self):
        if not self.executable_path:
            return False
        return bool(self._realpath())


class UndefinedApplicationExecutable(ApplicationExecutable):
    """Some applications do not require executable path from settings.

    In that case this class is used to "fake" existing executable.
    """
    def __init__(self):
        pass

    def __str__(self):
        return self.__class__.__name__

    def __repr__(self):
        return "<{}>".format(self.__class__.__name__)

    def as_args(self):
        return []

    def exists(self):
        return True


class ApplicationGroup:
    """Hold information about application group.

    Application group wraps different versions (variants) of an application,
    e.g. "maya" is the group and "maya_2020" is a variant.

    Group holds `host_name`, which is the implementation name used in AYON.
    It also holds `enabled` if the whole app group is enabled, and `icon` for
    the application icon path in resources.

    Group also has `environment`, which holds environments shared by all
    variants.

    Args:
        name (str): Group's name.
        data (dict): Group defining data loaded from settings.
        manager (ApplicationManager): Manager that created the group.
    """

    def __init__(self, name, data, manager):
        self.name = name
        self.manager = manager
        self._data = data

        self.enabled = data["enabled"]
        self.label = data["label"] or None
        self.icon = data["icon"] or None
        env = {}
        try:
            env = json.loads(data["environment"])
        except Exception:
            pass
        self._environment = env

        host_name = data["host_name"] or None
        self.is_host = host_name is not None
        self.host_name = host_name

        settings_variants = data["variants"]
        variants = {}
        for variant_data in settings_variants:
            app_variant = Application(variant_data, self)
            variants[app_variant.name] = app_variant

        self.variants = variants

    def __repr__(self):
        return "<{}> - {}".format(self.__class__.__name__, self.name)

    def __iter__(self):
        for variant in self.variants.values():
            yield variant

    @property
    def environment(self):
        return copy.deepcopy(self._environment)


class Application:
    """Hold information about application.

    Object by itself does nothing special.

    Args:
        data (dict): Data for the version containing information about
            executables, variant label or if it is enabled.
            Only required key is `executables`.
        group (ApplicationGroup): App group object that created the
            application and under which the application belongs.

    """
    def __init__(self, data, group):
        self._data = data
        name = data["name"]
        label = data["label"] or name
        enabled = False
        if group.enabled:
            enabled = data.get("enabled", True)

        if group.label:
            full_label = " ".join((group.label, label))
        else:
            full_label = label
        env = {}
        try:
            env = json.loads(data["environment"])
        except Exception:
            pass

        arguments = data["arguments"]
        if isinstance(arguments, dict):
            arguments = arguments.get(platform.system().lower())

        if not arguments:
            arguments = []

        _executables = data["executables"].get(platform.system().lower(), [])
        executables = [
            ApplicationExecutable(executable)
            for executable in _executables
        ]

        self.group = group

        self.name = name
        self.label = label
        self.enabled = enabled
        self.use_python_2 = data.get("use_python_2", False)

        self.full_name = "/".join((group.name, name))
        self.full_label = full_label
        self.arguments = arguments
        self.executables = executables
        self._environment = env

    def __repr__(self):
        return "<{}> - {}".format(self.__class__.__name__, self.full_name)

    @property
    def environment(self):
        return copy.deepcopy(self._environment)

    @property
    def manager(self):
        return self.group.manager

    @property
    def host_name(self):
        return self.group.host_name

    @property
    def icon(self):
        return self.group.icon

    @property
    def is_host(self):
        return self.group.is_host

    def find_executable(self):
        """Try to find existing executable for application.

        Returns (str): Path to executable from `executables` or None if none
            exists.
        """
        for executable in self.executables:
            if executable.exists():
                return executable
        return None

    def launch(self, *args, **kwargs):
        """Launch the application.

        For this purpose the manager's launch method is used to keep logic in
        one place.

        Arguments must match the manager's launch method. That's why *args
        and **kwargs are used.

        Returns:
            subprocess.Popen: Return executed process as Popen object.
        """
        return self.manager.launch(self.full_name, *args, **kwargs)


class EnvironmentToolGroup:
    """Hold information about environment tool group.

    Environment tool group may hold different variants of the same tool and
    set environments that are shared by all of them.

    e.g. "mtoa" may have different versions but all environments except one
    are the same.

    Args:
        data (dict): Group information with variants.
        manager (ApplicationManager): Manager that creates the group.
    """

    def __init__(self, data, manager):
        name = data["name"]
        label = data["label"]

        self.name = name
        self.label = label
        self._data = data
        self.manager = manager

        environment = {}
        try:
            environment = json.loads(data["environment"])
        except Exception:
            pass
        self._environment = environment

        variants = data.get("variants") or []
        variants_by_name = {}
        for variant_data in variants:
            tool = EnvironmentTool(variant_data, self)
            variants_by_name[tool.name] = tool
        self.variants = variants_by_name

    def __repr__(self):
        return "<{}> - {}".format(self.__class__.__name__, self.name)

    def __iter__(self):
        for variant in self.variants.values():
            yield variant

    @property
    def environment(self):
        return copy.deepcopy(self._environment)


class EnvironmentTool:
    """Hold information about application tool.

    Structure of tool information.

    Args:
        variant_data (dict): Variant data with environments and
            host and app variant filters.
        group (EnvironmentToolGroup): Group which wraps the tool.
    """

    def __init__(self, variant_data, group):
        # Backwards compatibility 3.9.1 - 3.9.2
        # - 'variant_data' contained only environments but now contains also
        #   host and application variant filters
        name = variant_data["name"]
        label = variant_data["label"]
        host_names = variant_data["host_names"]
        app_variants = variant_data["app_variants"]

        environment = {}
        try:
            environment = json.loads(variant_data["environment"])
        except Exception:
            pass

        self.host_names = host_names
        self.app_variants = app_variants
        self.name = name
        self.variant_label = label
        self.label = " ".join((group.label, label))
        self.group = group

        self._environment = environment
        self.full_name = "/".join((group.name, name))

    def __repr__(self):
        return "<{}> - {}".format(self.__class__.__name__, self.full_name)

    @property
    def environment(self):
        return copy.deepcopy(self._environment)

    def is_valid_for_app(self, app):
        """Is tool valid for application.

        Args:
            app (Application): Application for which environments are
                prepared.
        """
        if self.app_variants and app.full_name not in self.app_variants:
            return False

        if self.host_names and app.host_name not in self.host_names:
            return False
        return True
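
As a small sketch of how the executable wrapper defined above behaves (not part of the commit; the path is a placeholder), `ApplicationExecutable` expands `{ENV_VAR}` placeholders from `os.environ`, resolves macOS `.app` bundles to the real binary inside `Contents/MacOS`, and reports whether the path is actually launchable on this machine:

from ayon_applications import ApplicationExecutable

# Placeholder path; on macOS "/Applications/Blender" would be resolved
# through "Blender.app/Contents/Info.plist" to the bundle executable.
executable = ApplicationExecutable("/Applications/Blender")
print(executable.exists())   # True only when the resolved path is launchable
print(executable.as_args())  # argument list later passed to subprocess.Popen
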

@@ -0,0 +1,50 @@
class ApplicationNotFound(Exception):
    """Application was not found in ApplicationManager by name."""

    def __init__(self, app_name):
        self.app_name = app_name
        super(ApplicationNotFound, self).__init__(
            "Application \"{}\" was not found.".format(app_name)
        )


class ApplicationExecutableNotFound(Exception):
    """Defined executable paths are not available on the machine."""

    def __init__(self, application):
        self.application = application
        details = None
        if not application.executables:
            msg = (
                "Executable paths for application \"{}\"({}) are not set."
            )
        else:
            msg = (
                "Defined executable paths for application \"{}\"({})"
                " are not available on this machine."
            )
            details = "Defined paths:"
            for executable in application.executables:
                details += "\n- " + executable.executable_path

        self.msg = msg.format(application.full_label, application.full_name)
        self.details = details

        exc_mgs = str(self.msg)
        if details:
            # Is it a good idea to pass a newline symbol to exception message?
            exc_mgs += "\n" + details
        self.exc_msg = exc_mgs
        super(ApplicationExecutableNotFound, self).__init__(exc_mgs)


class ApplicationLaunchFailed(Exception):
    """Application launch failed due to known reason.

    Message should be self explanatory as traceback won't be shown.
    """
    pass


class MissingRequiredKey(KeyError):
    pass
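
These exceptions form the error contract of `ApplicationManager.launch` (defined in `manager.py` later in this diff). A hedged handling sketch, not part of the commit, with placeholder context values:

from ayon_applications import (
    ApplicationManager,
    ApplicationNotFound,
    ApplicationExecutableNotFound,
    ApplicationLaunchFailed,
)

manager = ApplicationManager()
try:
    manager.launch(
        "maya/2024",  # placeholder full application name
        project_name="my_project",
        folder_path="/assets/characters/hero",
        task_name="modeling",
    )
except ApplicationNotFound:
    print("No such application variant is configured.")
except ApplicationExecutableNotFound as exc:
    print(exc.msg)      # human readable message
    print(exc.details)  # configured paths, may be None
except ApplicationLaunchFailed as exc:
    print(str(exc))     # message is meant to be shown without a traceback
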
150 server_addon/applications/client/ayon_applications/hooks.py Normal file

@@ -0,0 +1,150 @@
import platform
from abc import ABCMeta, abstractmethod

import six

from ayon_core.lib import Logger

from .defs import LaunchTypes


@six.add_metaclass(ABCMeta)
class LaunchHook:
    """Abstract base class of launch hook."""
    # Order of prelaunch hook, will be executed as last if set to None.
    order = None
    # List of host implementations, skipped if empty.
    hosts = set()
    # Set of application groups
    app_groups = set()
    # Set of specific application names
    app_names = set()
    # Set of platform availability
    platforms = set()
    # Set of launch types for which the hook is available
    # - if empty then it is available for all launch types
    # - by default has 'local' which is the most common reason for launch
    #   hooks
    launch_types = {LaunchTypes.local}

    def __init__(self, launch_context):
        """Constructor of launch hook.

        Should always be called.
        """
        self.log = Logger.get_logger(self.__class__.__name__)

        self.launch_context = launch_context

        is_valid = self.class_validation(launch_context)
        if is_valid:
            is_valid = self.validate()

        self.is_valid = is_valid

    @classmethod
    def class_validation(cls, launch_context):
        """Validation of class attributes by launch context.

        Args:
            launch_context (ApplicationLaunchContext): Context of launching
                application.

        Returns:
            bool: Is launch hook valid for the context by class attributes.
        """
        if cls.platforms:
            low_platforms = tuple(
                _platform.lower()
                for _platform in cls.platforms
            )
            if platform.system().lower() not in low_platforms:
                return False

        if cls.hosts:
            if launch_context.host_name not in cls.hosts:
                return False

        if cls.app_groups:
            if launch_context.app_group.name not in cls.app_groups:
                return False

        if cls.app_names:
            if launch_context.app_name not in cls.app_names:
                return False

        if cls.launch_types:
            if launch_context.launch_type not in cls.launch_types:
                return False

        return True

    @property
    def data(self):
        return self.launch_context.data

    @property
    def application(self):
        return getattr(self.launch_context, "application", None)

    @property
    def manager(self):
        return getattr(self.application, "manager", None)

    @property
    def host_name(self):
        return getattr(self.application, "host_name", None)

    @property
    def app_group(self):
        return getattr(self.application, "group", None)

    @property
    def app_name(self):
        return getattr(self.application, "full_name", None)

    @property
    def addons_manager(self):
        return getattr(self.launch_context, "addons_manager", None)

    @property
    def modules_manager(self):
        """
        Deprecated:
            Use 'addons_manager' instead.
        """
        return self.addons_manager

    def validate(self):
        """Optional validation of launch hook on initialization.

        Returns:
            bool: Hook is valid (True) or invalid (False).
        """
        # QUESTION Not sure if this method has any usable potential.
        # - maybe result can be based on settings
        return True

    @abstractmethod
    def execute(self, *args, **kwargs):
        """Abstract execute method where logic of hook is."""
        pass


class PreLaunchHook(LaunchHook):
    """Abstract class of prelaunch hook.

    This launch hook will be processed before application is launched.

    If any exception happens during processing, the application won't be
    launched.
    """


class PostLaunchHook(LaunchHook):
    """Abstract class of postlaunch hook.

    This launch hook will be processed after application is launched.

    Nothing will happen if any exception happens during processing, and
    processing of other postlaunch hooks won't stop either.
    """
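
A hook is picked up automatically when its module lives in one of the directories returned by `discover_launch_hooks` in `manager.py` (next file in this diff). A minimal prelaunch hook sketch, not part of the commit, with hypothetical filter values and a hypothetical environment variable name:

from ayon_applications import PreLaunchHook, LaunchTypes


class AddCustomEnv(PreLaunchHook):
    """Example hook: inject a studio variable before a local Maya launch."""

    order = 10
    app_groups = {"maya"}  # hypothetical group filter
    launch_types = {LaunchTypes.local}

    def execute(self):
        # 'launch_context.env' is the environment passed to subprocess.Popen.
        self.launch_context.env["STUDIO_CUSTOM_FLAG"] = "1"
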
676 server_addon/applications/client/ayon_applications/manager.py Normal file

@@ -0,0 +1,676 @@
import os
import sys
import copy
import json
import tempfile
import platform
import inspect
import subprocess

import six

from ayon_core import AYON_CORE_ROOT
from ayon_core.settings import get_studio_settings
from ayon_core.lib import (
    Logger,
    modules_from_path,
    classes_from_module,
    get_linux_launcher_args,
)
from ayon_core.addon import AddonsManager

from .constants import DEFAULT_ENV_SUBGROUP
from .exceptions import (
    ApplicationNotFound,
    ApplicationExecutableNotFound,
)
from .hooks import PostLaunchHook, PreLaunchHook
from .defs import EnvironmentToolGroup, ApplicationGroup, LaunchTypes


class ApplicationManager:
    """Load applications and tools and store them by their full name.

    Args:
        studio_settings (dict): Preloaded studio settings. When passed the
            manager will always use these values. Gives ability to create a
            manager using different settings.
    """

    def __init__(self, studio_settings=None):
        self.log = Logger.get_logger(self.__class__.__name__)

        self.app_groups = {}
        self.applications = {}
        self.tool_groups = {}
        self.tools = {}

        self._studio_settings = studio_settings

        self.refresh()

    def set_studio_settings(self, studio_settings):
        """Ability to change init system settings.

        This will trigger refresh of manager.
        """
        self._studio_settings = studio_settings

        self.refresh()

    def refresh(self):
        """Refresh applications from settings."""
        self.app_groups.clear()
        self.applications.clear()
        self.tool_groups.clear()
        self.tools.clear()

        if self._studio_settings is not None:
            settings = copy.deepcopy(self._studio_settings)
        else:
            settings = get_studio_settings(
                clear_metadata=False, exclude_locals=False
            )

        applications_addon_settings = settings["applications"]

        # Prepare known applications
        app_defs = applications_addon_settings["applications"]
        additional_apps = app_defs.pop("additional_apps")
        for additional_app in additional_apps:
            app_name = additional_app.pop("name")
            if app_name in app_defs:
                self.log.warning((
                    "Additional application '{}' is already"
                    " in built-in applications."
                ).format(app_name))
            app_defs[app_name] = additional_app

        for group_name, variant_defs in app_defs.items():
            group = ApplicationGroup(group_name, variant_defs, self)
            self.app_groups[group_name] = group
            for app in group:
                self.applications[app.full_name] = app

        tools_definitions = applications_addon_settings["tool_groups"]
        for tool_group_data in tools_definitions:
            group = EnvironmentToolGroup(tool_group_data, self)
            self.tool_groups[group.name] = group
            for tool in group:
                self.tools[tool.full_name] = tool

    def find_latest_available_variant_for_group(self, group_name):
        group = self.app_groups.get(group_name)
        if group is None or not group.enabled:
            return None

        output = None
        for _, variant in reversed(sorted(group.variants.items())):
            executable = variant.find_executable()
            if executable:
                output = variant
                break
        return output

    def create_launch_context(self, app_name, **data):
        """Prepare launch context for application.

        Args:
            app_name (str): Name of application that should be launched.
            **data (Any): Any additional data. Data may be used during
                preparation to store objects usable in multiple places.

        Returns:
            ApplicationLaunchContext: Launch context for application.

        Raises:
            ApplicationNotFound: Application was not found by entered name.
        """

        app = self.applications.get(app_name)
        if not app:
            raise ApplicationNotFound(app_name)

        executable = app.find_executable()

        return ApplicationLaunchContext(
            app, executable, **data
        )

    def launch_with_context(self, launch_context):
        """Launch application using existing launch context.

        Args:
            launch_context (ApplicationLaunchContext): Prepared launch
                context.
        """

        if not launch_context.executable:
            raise ApplicationExecutableNotFound(launch_context.application)
        return launch_context.launch()

    def launch(self, app_name, **data):
        """Launch procedure.

        For host applications the data is expected to contain "project_name",
        "folder_path" and "task_name".

        Args:
            app_name (str): Name of application that should be launched.
            **data (dict): Any additional data. Data may be used during
                preparation to store objects usable in multiple places.

        Raises:
            ApplicationNotFound: Application was not found by entered
                argument `app_name`.
            ApplicationExecutableNotFound: Executables in application
                definition were not found on this machine.
            ApplicationLaunchFailed: Something important for application
                launch failed. Exception should contain explanation message,
                traceback should not be needed.
        """

        context = self.create_launch_context(app_name, **data)
        return self.launch_with_context(context)


class ApplicationLaunchContext:
    """Context of launching application.

    Main purpose of context is to prepare launch arguments and keyword
    arguments for new process. Most important part of keyword arguments
    preparations are environment variables.

    During the whole process it is possible to use `data` attribute to store
    objects usable in multiple places.

    Launch arguments are strings in a list. It is possible to "chain"
    arguments when their order matters by adding a list where the order is
    right and should not change.
    NOTE: This is recommendation, not requirement.
    e.g.: `["nuke.exe", "--NukeX"]` -> In this case any part of process may
    insert argument between `nuke.exe` and `--NukeX`. To keep them together
    it is better to wrap them in another list: `[["nuke.exe", "--NukeX"]]`.

    Notes:
        It is possible to use launch context only to prepare environment
        variables. In that case `executable` may be None and the
        'run_prelaunch_hooks' method can be used to run prelaunch hooks
        which prepare them.

    Args:
        application (Application): Application definition.
        executable (ApplicationExecutable): Object with path to executable.
        env_group (Optional[str]): Environment variable group. If not set
            'DEFAULT_ENV_SUBGROUP' is used.
        launch_type (Optional[str]): Launch type. If not set 'local' is used.
        **data (dict): Any additional data. Data may be used during
            preparation to store objects usable in multiple places.
    """

    def __init__(
        self,
        application,
        executable,
        env_group=None,
        launch_type=None,
        **data
    ):
        # Application object
        self.application = application

        self.addons_manager = AddonsManager()

        # Logger
        logger_name = "{}-{}".format(self.__class__.__name__,
                                     self.application.full_name)
        self.log = Logger.get_logger(logger_name)

        self.executable = executable

        if launch_type is None:
            launch_type = LaunchTypes.local
        self.launch_type = launch_type

        if env_group is None:
            env_group = DEFAULT_ENV_SUBGROUP

        self.env_group = env_group

        self.data = dict(data)

        launch_args = []
        if executable is not None:
            launch_args = executable.as_args()
        # subprocess.Popen launch arguments (first argument in constructor)
        self.launch_args = launch_args
        self.launch_args.extend(application.arguments)
        if self.data.get("app_args"):
            self.launch_args.extend(self.data.pop("app_args"))

        # Handle launch environments
        src_env = self.data.pop("env", None)
        if src_env is not None and not isinstance(src_env, dict):
            self.log.warning((
                "Passed `env` kwarg has invalid type: {}. Expected: `dict`."
                " Using `os.environ` instead."
            ).format(str(type(src_env))))
            src_env = None

        if src_env is None:
            src_env = os.environ

        ignored_env = {"QT_API", }
        env = {
            key: str(value)
            for key, value in src_env.items()
            if key not in ignored_env
        }
        # subprocess.Popen keyword arguments
        self.kwargs = {"env": env}

        if platform.system().lower() == "windows":
            # Detach new process from currently running process on Windows
            flags = (
                subprocess.CREATE_NEW_PROCESS_GROUP
                | subprocess.DETACHED_PROCESS
            )
            self.kwargs["creationflags"] = flags

        if not sys.stdout:
            self.kwargs["stdout"] = subprocess.DEVNULL
            self.kwargs["stderr"] = subprocess.DEVNULL

        self.prelaunch_hooks = None
        self.postlaunch_hooks = None

        self.process = None
        self._prelaunch_hooks_executed = False

    @property
    def env(self):
        if (
            "env" not in self.kwargs
            or self.kwargs["env"] is None
        ):
            self.kwargs["env"] = {}
        return self.kwargs["env"]

    @env.setter
    def env(self, value):
        if not isinstance(value, dict):
            raise ValueError(
                "'env' attribute expect 'dict' object. Got: {}".format(
                    str(type(value))
                )
            )
        self.kwargs["env"] = value

    @property
    def modules_manager(self):
        """
        Deprecated:
            Use 'addons_manager' instead.

        """
        return self.addons_manager

    def _collect_addons_launch_hook_paths(self):
        """Helper to collect application launch hooks from addons.

        Addon has to have implemented 'get_launch_hook_paths' method which
        can expect application as argument or nothing.

        Returns:
            List[str]: Paths to launch hook directories.
        """

        expected_types = (list, tuple, set)

        output = []
        for module in self.addons_manager.get_enabled_addons():
            # Skip module if it does not have 'get_launch_hook_paths'
            func = getattr(module, "get_launch_hook_paths", None)
            if func is None:
                continue

            func = module.get_launch_hook_paths
            if hasattr(inspect, "signature"):
                sig = inspect.signature(func)
                expect_args = len(sig.parameters) > 0
            else:
                expect_args = len(inspect.getargspec(func)[0]) > 0

            # Pass application argument if method expects it.
            try:
                if expect_args:
                    hook_paths = func(self.application)
                else:
                    hook_paths = func()
            except Exception:
                self.log.warning(
                    "Failed to call 'get_launch_hook_paths'",
                    exc_info=True
                )
                continue

            if not hook_paths:
                continue

            # Convert string to list
            if isinstance(hook_paths, six.string_types):
                hook_paths = [hook_paths]

            # Skip invalid types
            if not isinstance(hook_paths, expected_types):
                self.log.warning((
                    "Result of `get_launch_hook_paths`"
                    " has invalid type {}. Expected {}"
                ).format(type(hook_paths), expected_types))
                continue

            output.extend(hook_paths)
        return output

    def paths_to_launch_hooks(self):
        """Directory paths where to look for launch hooks."""
        # This method has potential to be part of application manager (maybe).
        paths = []

        # TODO load additional studio paths from settings
        global_hooks_dir = os.path.join(AYON_CORE_ROOT, "hooks")

        hooks_dirs = [
            global_hooks_dir
        ]
        if self.host_name:
            # If host requires launch hooks and is an addon then launch hooks
            #   should be collected using 'collect_launch_hook_paths'
            # - the addon has to implement 'get_launch_hook_paths'
            host_module = self.addons_manager.get_host_addon(self.host_name)
            if not host_module:
                hooks_dirs.append(os.path.join(
                    AYON_CORE_ROOT, "hosts", self.host_name, "hooks"
                ))

        for path in hooks_dirs:
            if (
                os.path.exists(path)
                and os.path.isdir(path)
                and path not in paths
            ):
                paths.append(path)

        # Load addon paths
        paths.extend(self._collect_addons_launch_hook_paths())

        return paths

    def discover_launch_hooks(self, force=False):
        """Load and prepare launch hooks."""
        if (
            self.prelaunch_hooks is not None
            or self.postlaunch_hooks is not None
        ):
            if not force:
                self.log.info("Launch hooks were already discovered.")
                return

            self.prelaunch_hooks.clear()
            self.postlaunch_hooks.clear()

        self.log.debug("Discovery of launch hooks started.")

        paths = self.paths_to_launch_hooks()
        self.log.debug("Paths searched for launch hooks:\n{}".format(
            "\n".join("- {}".format(path) for path in paths)
        ))

        all_classes = {
            "pre": [],
            "post": []
        }
        for path in paths:
            if not os.path.exists(path):
                self.log.info(
                    "Path to launch hooks does not exist: \"{}\"".format(path)
                )
                continue

            modules, _crashed = modules_from_path(path)
            for _filepath, module in modules:
                all_classes["pre"].extend(
                    classes_from_module(PreLaunchHook, module)
                )
                all_classes["post"].extend(
                    classes_from_module(PostLaunchHook, module)
                )

        for launch_type, classes in all_classes.items():
            hooks_with_order = []
            hooks_without_order = []
            for klass in classes:
                try:
                    hook = klass(self)
                    if not hook.is_valid:
                        self.log.debug(
                            "Skipped hook invalid for current launch context: "
                            "{}".format(klass.__name__)
                        )
                        continue

                    if inspect.isabstract(hook):
                        self.log.debug("Skipped abstract hook: {}".format(
                            klass.__name__
                        ))
                        continue

                    # Separate hooks by pre/post class
                    if hook.order is None:
                        hooks_without_order.append(hook)
                    else:
                        hooks_with_order.append(hook)

                except Exception:
                    self.log.warning(
                        "Initialization of hook failed: "
                        "{}".format(klass.__name__),
                        exc_info=True
                    )

            # Sort hooks with order by order
            ordered_hooks = list(sorted(
                hooks_with_order, key=lambda obj: obj.order
            ))
            # Extend ordered hooks with hooks without defined order
            ordered_hooks.extend(hooks_without_order)

            if launch_type == "pre":
                self.prelaunch_hooks = ordered_hooks
            else:
                self.postlaunch_hooks = ordered_hooks

        self.log.debug("Found {} prelaunch and {} postlaunch hooks.".format(
            len(self.prelaunch_hooks), len(self.postlaunch_hooks)
        ))

    @property
    def app_name(self):
        return self.application.name

    @property
    def host_name(self):
        return self.application.host_name

    @property
    def app_group(self):
        return self.application.group

    @property
    def manager(self):
        return self.application.manager

    def _run_process(self):
        # Windows and MacOS have easier process start
        low_platform = platform.system().lower()
        if low_platform in ("windows", "darwin"):
            return subprocess.Popen(self.launch_args, **self.kwargs)

        # Linux uses mid process
        # - it is possible that the mid process executable is not
        #   available for this version of AYON; in that case use standard
        #   launch
        launch_args = get_linux_launcher_args()
        if launch_args is None:
            return subprocess.Popen(self.launch_args, **self.kwargs)

        # Prepare data that will be passed to midprocess
        # - store arguments to a json and pass path to json as last argument
        # - pass environments to set
        app_env = self.kwargs.pop("env", {})
        json_data = {
            "args": self.launch_args,
            "env": app_env
        }
        if app_env:
            # Filter environments of subprocess
            self.kwargs["env"] = {
                key: value
                for key, value in os.environ.items()
                if key in app_env
            }

        # Create temp file
        json_temp = tempfile.NamedTemporaryFile(
            mode="w", prefix="op_app_args", suffix=".json", delete=False
        )
        json_temp.close()
        json_temp_filpath = json_temp.name
        with open(json_temp_filpath, "w") as stream:
            json.dump(json_data, stream)

        launch_args.append(json_temp_filpath)

        # Create mid-process which will launch application
        process = subprocess.Popen(launch_args, **self.kwargs)
        # Wait until the process finishes
        # - This is important! The process would stay in "open" state.
        process.wait()
        # Remove the temp file
        os.remove(json_temp_filpath)
        # Return process which is already terminated
        return process

    def run_prelaunch_hooks(self):
        """Run prelaunch hooks.

        This method will be executed only once, any future calls will skip
        the processing.
        """

        if self._prelaunch_hooks_executed:
            self.log.warning("Prelaunch hooks were already executed.")
            return
        # Discover launch hooks
        self.discover_launch_hooks()

        # Execute prelaunch hooks
        for prelaunch_hook in self.prelaunch_hooks:
            self.log.debug("Executing prelaunch hook: {}".format(
                str(prelaunch_hook.__class__.__name__)
            ))
            prelaunch_hook.execute()
        self._prelaunch_hooks_executed = True

    def launch(self):
        """Collect data for new process and then create it.

        This method must not be executed more than once.

        Returns:
            subprocess.Popen: Created process as Popen object.
        """
        if self.process is not None:
            self.log.warning("Application was already launched.")
            return

        if not self._prelaunch_hooks_executed:
            self.run_prelaunch_hooks()

        self.log.debug("All prelaunch hooks executed. Starting new process.")

        # Prepare subprocess args
        args_len_str = ""
        if isinstance(self.launch_args, str):
            args = self.launch_args
        else:
            args = self.clear_launch_args(self.launch_args)
            args_len_str = " ({})".format(len(args))
        self.log.info(
            "Launching \"{}\" with args{}: {}".format(
                self.application.full_name, args_len_str, args
            )
        )
        self.launch_args = args

        # Run process
        self.process = self._run_process()

        # Process post launch hooks
        for postlaunch_hook in self.postlaunch_hooks:
            self.log.debug("Executing postlaunch hook: {}".format(
                str(postlaunch_hook.__class__.__name__)
            ))

            # TODO how to handle errors?
            # - store to variable to keep them accessible?
            try:
                postlaunch_hook.execute()

            except Exception:
                self.log.warning(
                    "After launch procedures were not successful.",
                    exc_info=True
                )

        self.log.debug("Launch of {} finished.".format(
            self.application.full_name
        ))

        return self.process

    @staticmethod
    def clear_launch_args(args):
        """Collect launch arguments to final order.

        Launch arguments should be a list that may contain other lists; this
        function will unpack the inner lists and keep ordering.

        ```
        # source
        [ [ arg1, [ arg2, arg3 ] ], arg4, [arg5, arg6]]
        # result
        [ arg1, arg2, arg3, arg4, arg5, arg6]
        ```

        Args:
            args (list): Source arguments in list may contain inner lists.

        Return:
            list: Unpacked arguments.
        """
        if isinstance(args, str):
            return args
        all_cleared = False
        while not all_cleared:
            all_cleared = True
            new_args = []
            for arg in args:
                if isinstance(arg, (list, tuple, set)):
                    all_cleared = False
                    for _arg in arg:
                        new_args.append(_arg)
                else:
                    new_args.append(arg)
            args = new_args

        return args
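
As the Notes section of `ApplicationLaunchContext` above suggests, the context can be used purely to compute launch environments without spawning a process. A hedged sketch, not part of the commit, with placeholder project/folder/task/app values:

from ayon_applications import ApplicationManager

manager = ApplicationManager()
context = manager.create_launch_context(
    "nuke/15-0",  # placeholder full application name
    project_name="my_project",
    folder_path="/shots/sq01/sh010",
    task_name="compositing",
)
# Runs discovered prelaunch hooks once; they fill 'context.env' in place.
context.run_prelaunch_hooks()
print(sorted(context.env))
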
@@ -0,0 +1,48 @@
"""
Run after global plugin 'CollectHostName' in ayon_core.

Requires:
    None

Provides:
    context -> hostName (str)
    context -> appName (str)
    context -> appLabel (str)
"""
import os
import pyblish.api

from ayon_applications import ApplicationManager


class CollectAppName(pyblish.api.ContextPlugin):
    """Collect avalon host name to context."""

    label = "Collect App Name"
    order = pyblish.api.CollectorOrder - 0.499999

    def process(self, context):
        host_name = context.data.get("hostName")
        app_name = context.data.get("appName")
        app_label = context.data.get("appLabel")
        # Don't override value if it is already set
        if host_name and app_name and app_label:
            return

        # Use AYON_APP_NAME to get full app name
        if not app_name:
            app_name = os.environ.get("AYON_APP_NAME")

        # Fill missing values based on app full name
        if (not host_name or not app_label) and app_name:
            app_manager = ApplicationManager()
            app = app_manager.applications.get(app_name)
            if app:
                if not host_name:
                    host_name = app.host_name
                if not app_label:
                    app_label = app.full_label

        context.data["hostName"] = host_name
        context.data["appName"] = app_name
        context.data["appLabel"] = app_label
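
This collector is normally registered through `ApplicationsAddon.get_plugin_paths` above, which points the publishing framework at the addon's `plugins/publish` directory. A hedged sketch of the manual equivalent, not part of the commit and only useful outside the addon framework:

import os
import pyblish.api

from ayon_applications import APPLICATIONS_ADDON_ROOT

# Equivalent of the path that get_plugin_paths() exposes to the framework.
publish_dir = os.path.join(APPLICATIONS_ADDON_ROOT, "plugins", "publish")
pyblish.api.register_plugin_path(publish_dir)
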
609
server_addon/applications/client/ayon_applications/utils.py
Normal file
609
server_addon/applications/client/ayon_applications/utils.py
Normal file
|
|
@ -0,0 +1,609 @@
|
|||
import os
|
||||
import copy
|
||||
import json
|
||||
import platform
|
||||
import collections
|
||||
|
||||
import six
|
||||
import acre
|
||||
|
||||
from ayon_core import AYON_CORE_ROOT
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_core.lib import Logger, get_ayon_username
|
||||
from ayon_core.addon import AddonsManager
|
||||
from ayon_core.pipeline import HOST_WORKFILE_EXTENSIONS
|
||||
from ayon_core.pipeline.template_data import get_template_data
|
||||
from ayon_core.pipeline.workfile import (
|
||||
get_workfile_template_key,
|
||||
get_workdir_with_workdir_data,
|
||||
get_last_workfile,
|
||||
should_use_last_workfile_on_launch,
|
||||
should_open_workfiles_tool_on_launch,
|
||||
)
|
||||
|
||||
from .constants import PLATFORM_NAMES, DEFAULT_ENV_SUBGROUP
|
||||
from .exceptions import MissingRequiredKey, ApplicationLaunchFailed
|
||||
from .manager import ApplicationManager
|
||||
|
||||
|
||||
def parse_environments(env_data, env_group=None, platform_name=None):
|
||||
"""Parse environment values from settings byt group and platform.
|
||||
|
||||
Data may contain up to 2 hierarchical levels of dictionaries. At the end
|
||||
of the last level must be string or list. List is joined using platform
|
||||
specific joiner (';' for windows and ':' for linux and mac).
|
||||
|
||||
Hierarchical levels can contain keys for subgroups and platform name.
|
||||
Platform specific values must be always last level of dictionary. Platform
|
||||
names are "windows" (MS Windows), "linux" (any linux distribution) and
|
||||
"darwin" (any MacOS distribution).
|
||||
|
||||
Subgroups are helpers added mainly for standard and on farm usage. Farm
|
||||
may require different environments for e.g. licence related values or
|
||||
plugins. Default subgroup is "standard".
|
||||
|
||||
Examples:
|
||||
```
|
||||
{
|
||||
# Unchanged value
|
||||
"ENV_KEY1": "value",
|
||||
# Empty values are kept (unset environment variable)
|
||||
"ENV_KEY2": "",
|
||||
|
||||
# Join list values with ':' or ';'
|
||||
"ENV_KEY3": ["value1", "value2"],
|
||||
|
||||
# Environment groups
|
||||
"ENV_KEY4": {
|
||||
"standard": "DEMO_SERVER_URL",
|
||||
"farm": "LICENCE_SERVER_URL"
|
||||
},
|
||||
|
||||
# Platform specific (and only for windows and mac)
|
||||
"ENV_KEY5": {
|
||||
"windows": "windows value",
|
||||
"darwin": ["value 1", "value 2"]
|
||||
},
|
||||
|
||||
# Environment groups and platform combination
|
||||
"ENV_KEY6": {
|
||||
"farm": "FARM_VALUE",
|
||||
"standard": {
|
||||
"windows": ["value1", "value2"],
|
||||
"linux": "value1",
|
||||
"darwin": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
"""
|
||||
output = {}
|
||||
if not env_data:
|
||||
return output
|
||||
|
||||
if not env_group:
|
||||
env_group = DEFAULT_ENV_SUBGROUP
|
||||
|
||||
if not platform_name:
|
||||
platform_name = platform.system().lower()
|
||||
|
||||
for key, value in env_data.items():
|
||||
if isinstance(value, dict):
|
||||
# Look if any key is platform key
|
||||
# - expect that represents environment group if does not contain
|
||||
# platform keys
|
||||
if not PLATFORM_NAMES.intersection(set(value.keys())):
|
||||
# Skip the key if group is not available
|
||||
if env_group not in value:
|
||||
continue
|
||||
value = value[env_group]
|
||||
|
||||
# Check again if value is dictionary
|
||||
# - this time there should be only platform keys
|
||||
if isinstance(value, dict):
|
||||
value = value.get(platform_name)
|
||||
|
||||
# Check if value is list and join it's values
|
||||
# QUESTION Should empty values be skipped?
|
||||
if isinstance(value, (list, tuple)):
|
||||
value = os.pathsep.join(value)
|
||||
|
||||
# Set key to output if value is string
|
||||
if isinstance(value, six.string_types):
|
||||
output[key] = value
|
||||
return output
|
||||
|
||||
|
||||
class EnvironmentPrepData(dict):
|
||||
"""Helper dictionary for storin temp data during environment prep.
|
||||
|
||||
Args:
|
||||
data (dict): Data must contain required keys.
|
||||
"""
|
||||
required_keys = (
|
||||
"project_entity", "folder_entity", "task_entity", "app", "anatomy"
|
||||
)
|
||||
|
||||
def __init__(self, data):
|
||||
for key in self.required_keys:
|
||||
if key not in data:
|
||||
raise MissingRequiredKey(key)
|
||||
|
||||
if not data.get("log"):
|
||||
data["log"] = Logger.get_logger("EnvironmentPrepData")
|
||||
|
||||
if data.get("env") is None:
|
||||
data["env"] = os.environ.copy()
|
||||
|
||||
project_name = data["project_entity"]["name"]
|
||||
if "project_settings" not in data:
|
||||
data["project_settings"] = get_project_settings(project_name)
|
||||
|
||||
super(EnvironmentPrepData, self).__init__(data)
|
||||
|
||||
|
||||
def get_app_environments_for_context(
|
||||
project_name,
|
||||
folder_path,
|
||||
task_name,
|
||||
app_name,
|
||||
env_group=None,
|
||||
launch_type=None,
|
||||
env=None,
|
||||
addons_manager=None
|
||||
):
|
||||
"""Prepare environment variables by context.
|
||||
Args:
|
||||
project_name (str): Name of project.
|
||||
folder_path (str): Folder path.
|
||||
task_name (str): Name of task.
|
||||
app_name (str): Name of application that is launched and can be found
|
||||
by ApplicationManager.
|
||||
env_group (Optional[str]): Name of environment group. If not passed
|
||||
default group is used.
|
||||
launch_type (Optional[str]): Type for which prelaunch hooks are
|
||||
executed.
|
||||
env (Optional[dict[str, str]]): Initial environment variables.
|
||||
`os.environ` is used when not passed.
|
||||
addons_manager (Optional[AddonsManager]): Initialized modules
|
||||
manager.
|
||||
|
||||
Returns:
|
||||
dict: Environments for passed context and application.
|
||||
"""
|
||||
|
||||
# Prepare app object which can be obtained only from ApplicationManager
|
||||
app_manager = ApplicationManager()
|
||||
context = app_manager.create_launch_context(
|
||||
app_name,
|
||||
project_name=project_name,
|
||||
folder_path=folder_path,
|
||||
task_name=task_name,
|
||||
env_group=env_group,
|
||||
launch_type=launch_type,
|
||||
env=env,
|
||||
addons_manager=addons_manager,
|
||||
modules_manager=addons_manager,
|
||||
)
|
||||
context.run_prelaunch_hooks()
|
||||
return context.env
|
||||
|
||||
|
||||
def _merge_env(env, current_env):
|
||||
"""Modified function(merge) from acre module."""
|
||||
result = current_env.copy()
|
||||
for key, value in env.items():
|
||||
# Keep missing keys by not filling `missing` kwarg
|
||||
value = acre.lib.partial_format(value, data=current_env)
|
||||
result[key] = value
|
||||
return result
|
||||
|
||||
|
||||
def _add_python_version_paths(app, env, logger, addons_manager):
|
||||
"""Add vendor packages specific for a Python version."""
|
||||
|
||||
for addon in addons_manager.get_enabled_addons():
|
||||
addon.modify_application_launch_arguments(app, env)
|
||||
|
||||
# Skip adding if host name is not set
|
||||
if not app.host_name:
|
||||
return
|
||||
|
||||
# Add Python 2/3 modules
|
||||
python_vendor_dir = os.path.join(
|
||||
AYON_CORE_ROOT,
|
||||
"vendor",
|
||||
"python"
|
||||
)
|
||||
if app.use_python_2:
|
||||
pythonpath = os.path.join(python_vendor_dir, "python_2")
|
||||
else:
|
||||
pythonpath = os.path.join(python_vendor_dir, "python_3")
|
||||
|
||||
if not os.path.exists(pythonpath):
|
||||
return
|
||||
|
||||
logger.debug("Adding Python version specific paths to PYTHONPATH")
|
||||
python_paths = [pythonpath]
|
||||
|
||||
# Load PYTHONPATH from current launch context
|
||||
python_path = env.get("PYTHONPATH")
|
||||
if python_path:
|
||||
python_paths.append(python_path)
|
||||
|
||||
# Set new PYTHONPATH to launch context environments
|
||||
env["PYTHONPATH"] = os.pathsep.join(python_paths)
|
||||
|
||||
|
||||
def prepare_app_environments(
|
||||
data, env_group=None, implementation_envs=True, addons_manager=None
|
||||
):
|
||||
"""Modify launch environments based on launched app and context.
|
||||
|
||||
Args:
|
||||
data (EnvironmentPrepData): Dictionary where result and intermediate
|
||||
result will be stored.
|
||||
|
||||
"""
|
||||
app = data["app"]
|
||||
log = data["log"]
|
||||
source_env = data["env"].copy()
|
||||
|
||||
if addons_manager is None:
|
||||
addons_manager = AddonsManager()
|
||||
|
||||
_add_python_version_paths(app, source_env, log, addons_manager)
|
||||
|
||||
# Use environments from local settings
|
||||
filtered_local_envs = {}
|
||||
# NOTE Overrides for environment variables are not implemented in AYON.
|
||||
# project_settings = data["project_settings"]
|
||||
# whitelist_envs = project_settings["general"].get("local_env_white_list")
|
||||
# if whitelist_envs:
|
||||
# local_settings = get_local_settings()
|
||||
# local_envs = local_settings.get("environments") or {}
|
||||
# filtered_local_envs = {
|
||||
# key: value
|
||||
# for key, value in local_envs.items()
|
||||
# if key in whitelist_envs
|
||||
# }
|
||||
|
||||
# Apply local environment variables for already existing values
|
||||
for key, value in filtered_local_envs.items():
|
||||
if key in source_env:
|
||||
source_env[key] = value
|
||||
|
||||
# `app_and_tool_labels` is for debugging purposes
|
||||
app_and_tool_labels = [app.full_name]
|
||||
# Environments for application
|
||||
environments = [
|
||||
app.group.environment,
|
||||
app.environment
|
||||
]
|
||||
|
||||
folder_entity = data.get("folder_entity")
|
||||
# Add tools environments
|
||||
groups_by_name = {}
|
||||
tool_by_group_name = collections.defaultdict(dict)
|
||||
if folder_entity:
|
||||
# Make sure each tool group can be added only once
|
||||
for key in folder_entity["attrib"].get("tools") or []:
|
||||
tool = app.manager.tools.get(key)
|
||||
if not tool or not tool.is_valid_for_app(app):
|
||||
continue
|
||||
groups_by_name[tool.group.name] = tool.group
|
||||
tool_by_group_name[tool.group.name][tool.name] = tool
|
||||
|
||||
for group_name in sorted(groups_by_name.keys()):
|
||||
group = groups_by_name[group_name]
|
||||
environments.append(group.environment)
|
||||
for tool_name in sorted(tool_by_group_name[group_name].keys()):
|
||||
tool = tool_by_group_name[group_name][tool_name]
|
||||
environments.append(tool.environment)
|
||||
app_and_tool_labels.append(tool.full_name)
|
||||
|
||||
log.debug(
|
||||
"Will add environments for apps and tools: {}".format(
|
||||
", ".join(app_and_tool_labels)
|
||||
)
|
||||
)
|
||||
|
||||
env_values = {}
|
||||
for _env_values in environments:
|
||||
if not _env_values:
|
||||
continue
|
||||
|
||||
# Choose right platform
|
||||
tool_env = parse_environments(_env_values, env_group)
|
||||
|
||||
# Apply local environment variables
|
||||
# - must happen between all values because they may be used during
|
||||
# merge
|
||||
for key, value in filtered_local_envs.items():
|
||||
if key in tool_env:
|
||||
tool_env[key] = value
|
||||
|
||||
# Merge dictionaries
|
||||
env_values = _merge_env(tool_env, env_values)
|
||||
|
||||
merged_env = _merge_env(env_values, source_env)
|
||||
|
||||
loaded_env = acre.compute(merged_env, cleanup=False)
|
||||
|
||||
final_env = None
|
||||
# Add host specific environments
|
||||
if app.host_name and implementation_envs:
|
||||
host_addon = addons_manager.get_host_addon(app.host_name)
|
||||
add_implementation_envs = None
|
||||
if host_addon:
|
||||
add_implementation_envs = getattr(
|
||||
host_addon, "add_implementation_envs", None
|
||||
)
|
||||
if add_implementation_envs:
|
||||
# Function may only modify passed dict without returning value
|
||||
final_env = add_implementation_envs(loaded_env, app)
|
||||
|
||||
if final_env is None:
|
||||
final_env = loaded_env
|
||||
|
||||
keys_to_remove = set(source_env.keys()) - set(final_env.keys())
|
||||
|
||||
# Update env
|
||||
data["env"].update(final_env)
|
||||
for key in keys_to_remove:
|
||||
data["env"].pop(key, None)
|
||||
|
||||
|
||||
def apply_project_environments_value(
|
||||
project_name, env, project_settings=None, env_group=None
|
||||
):
|
||||
"""Apply project specific environments on passed environments.
|
||||
|
||||
The environments are applied to the passed `env` argument in place, so it is not
|
||||
required to apply changes back.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project for which environments should be
|
||||
received.
|
||||
env (dict): Environment values on which project specific environments
|
||||
will be applied.
|
||||
project_settings (dict): Project settings for passed project name.
|
||||
Optional if project settings are already prepared.
|
||||
|
||||
Returns:
|
||||
dict: Passed env values with applied project environments.
|
||||
|
||||
Raises:
|
||||
KeyError: If project settings do not contain keys for project specific
|
||||
environments.
|
||||
|
||||
"""
|
||||
if project_settings is None:
|
||||
project_settings = get_project_settings(project_name)
|
||||
|
||||
env_value = project_settings["core"]["project_environments"]
|
||||
if env_value:
|
||||
env_value = json.loads(env_value)
|
||||
parsed_value = parse_environments(env_value, env_group)
|
||||
env.update(acre.compute(
|
||||
_merge_env(parsed_value, env),
|
||||
cleanup=False
|
||||
))
|
||||
return env
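# --- Usage sketch (illustration only, not part of the diff) ---
# Hedged example of applying project specific environments on a copy of the
# current environment; the settings value shown is a hypothetical example of
# the JSON stored under "core/project_environments".
def _example_project_env():
    env = os.environ.copy()
    project_settings = {
        "core": {
            "project_environments": json.dumps(
                {"STUDIO_PROJECT_FLAG": "1"}
            )
        }
    }
    # 'env' is modified in place, so the return value may be ignored.
    return apply_project_environments_value(
        "demo_project", env, project_settings=project_settings
    )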
|
||||
|
||||
|
||||
def prepare_context_environments(data, env_group=None, addons_manager=None):
|
||||
"""Modify launch environments with context data for launched host.
|
||||
|
||||
Args:
|
||||
data (EnvironmentPrepData): Dictionary where result and intermediate
|
||||
result will be stored.
|
||||
|
||||
"""
|
||||
# Context environments
|
||||
log = data["log"]
|
||||
|
||||
project_entity = data["project_entity"]
|
||||
folder_entity = data["folder_entity"]
|
||||
task_entity = data["task_entity"]
|
||||
if not project_entity:
|
||||
log.info(
|
||||
"Skipping context environments preparation."
|
||||
" Launch context does not contain required data."
|
||||
)
|
||||
return
|
||||
|
||||
# Load project specific environments
|
||||
project_name = project_entity["name"]
|
||||
project_settings = get_project_settings(project_name)
|
||||
data["project_settings"] = project_settings
|
||||
|
||||
app = data["app"]
|
||||
context_env = {
|
||||
"AYON_PROJECT_NAME": project_entity["name"],
|
||||
"AYON_APP_NAME": app.full_name
|
||||
}
|
||||
if folder_entity:
|
||||
folder_path = folder_entity["path"]
|
||||
context_env["AYON_FOLDER_PATH"] = folder_path
|
||||
|
||||
if task_entity:
|
||||
context_env["AYON_TASK_NAME"] = task_entity["name"]
|
||||
|
||||
log.debug(
|
||||
"Context environments set:\n{}".format(
|
||||
json.dumps(context_env, indent=4)
|
||||
)
|
||||
)
|
||||
data["env"].update(context_env)
|
||||
|
||||
# Apply project specific environments on current env value
|
||||
# - apply them once the context environments are set
|
||||
apply_project_environments_value(
|
||||
project_name, data["env"], project_settings, env_group
|
||||
)
|
||||
|
||||
if not app.is_host:
|
||||
return
|
||||
|
||||
data["env"]["AYON_HOST_NAME"] = app.host_name
|
||||
|
||||
if not folder_entity or not task_entity:
|
||||
# QUESTION replace with log.info and skip workfile discovery?
|
||||
# - technically it should be possible to launch host without context
|
||||
raise ApplicationLaunchFailed(
|
||||
"Host launch require folder and task context."
|
||||
)
|
||||
|
||||
workdir_data = get_template_data(
|
||||
project_entity,
|
||||
folder_entity,
|
||||
task_entity,
|
||||
app.host_name,
|
||||
project_settings
|
||||
)
|
||||
data["workdir_data"] = workdir_data
|
||||
|
||||
anatomy = data["anatomy"]
|
||||
|
||||
task_type = workdir_data["task"]["type"]
|
||||
# Temp solution for how to pass task type to `_prepare_last_workfile`
|
||||
data["task_type"] = task_type
|
||||
|
||||
try:
|
||||
workdir = get_workdir_with_workdir_data(
|
||||
workdir_data,
|
||||
anatomy.project_name,
|
||||
anatomy,
|
||||
project_settings=project_settings
|
||||
)
|
||||
|
||||
except Exception as exc:
|
||||
raise ApplicationLaunchFailed(
|
||||
"Error in anatomy.format: {}".format(str(exc))
|
||||
)
|
||||
|
||||
if not os.path.exists(workdir):
|
||||
log.debug(
|
||||
"Creating workdir folder: \"{}\"".format(workdir)
|
||||
)
|
||||
try:
|
||||
os.makedirs(workdir)
|
||||
except Exception as exc:
|
||||
raise ApplicationLaunchFailed(
|
||||
"Couldn't create workdir because: {}".format(str(exc))
|
||||
)
|
||||
|
||||
data["env"]["AYON_WORKDIR"] = workdir
|
||||
|
||||
_prepare_last_workfile(data, workdir, addons_manager)
|
||||
|
||||
|
||||
def _prepare_last_workfile(data, workdir, addons_manager):
|
||||
"""last workfile workflow preparation.
|
||||
|
||||
Function checks if it should care about the last workfile workflow and tries
|
||||
to find the last workfile. Both pieces of information are stored to `data` and
|
||||
environments.
|
||||
|
||||
Last workfile path is always filled (with version 1) even if no workfile
|
||||
exists yet.
|
||||
|
||||
Args:
|
||||
data (EnvironmentPrepData): Dictionary where result and intermediate
|
||||
result will be stored.
|
||||
workdir (str): Path to folder where workfiles should be stored.
|
||||
|
||||
"""
|
||||
if not addons_manager:
|
||||
addons_manager = AddonsManager()
|
||||
|
||||
log = data["log"]
|
||||
|
||||
_workdir_data = data.get("workdir_data")
|
||||
if not _workdir_data:
|
||||
log.info(
|
||||
"Skipping last workfile preparation."
|
||||
" Key `workdir_data` not filled."
|
||||
)
|
||||
return
|
||||
|
||||
app = data["app"]
|
||||
workdir_data = copy.deepcopy(_workdir_data)
|
||||
project_name = data["project_name"]
|
||||
task_name = data["task_name"]
|
||||
task_type = data["task_type"]
|
||||
|
||||
start_last_workfile = data.get("start_last_workfile")
|
||||
if start_last_workfile is None:
|
||||
start_last_workfile = should_use_last_workfile_on_launch(
|
||||
project_name, app.host_name, task_name, task_type
|
||||
)
|
||||
else:
|
||||
log.info("Opening of last workfile was disabled by user")
|
||||
|
||||
data["start_last_workfile"] = start_last_workfile
|
||||
|
||||
workfile_startup = should_open_workfiles_tool_on_launch(
|
||||
project_name, app.host_name, task_name, task_type
|
||||
)
|
||||
data["workfile_startup"] = workfile_startup
|
||||
|
||||
# Store boolean as "0"(False) or "1"(True)
|
||||
data["env"]["AVALON_OPEN_LAST_WORKFILE"] = (
|
||||
str(int(bool(start_last_workfile)))
|
||||
)
|
||||
data["env"]["AYON_WORKFILE_TOOL_ON_START"] = (
|
||||
str(int(bool(workfile_startup)))
|
||||
)
|
||||
|
||||
_sub_msg = "" if start_last_workfile else " not"
|
||||
log.debug(
|
||||
"Last workfile should{} be opened on start.".format(_sub_msg)
|
||||
)
|
||||
|
||||
# Last workfile path
|
||||
last_workfile_path = data.get("last_workfile_path") or ""
|
||||
if not last_workfile_path:
|
||||
host_addon = addons_manager.get_host_addon(app.host_name)
|
||||
if host_addon:
|
||||
extensions = host_addon.get_workfile_extensions()
|
||||
else:
|
||||
extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name)
|
||||
|
||||
if extensions:
|
||||
anatomy = data["anatomy"]
|
||||
project_settings = data["project_settings"]
|
||||
task_type = workdir_data["task"]["type"]
|
||||
template_key = get_workfile_template_key(
|
||||
project_name,
|
||||
task_type,
|
||||
app.host_name,
|
||||
project_settings=project_settings
|
||||
)
|
||||
# Find last workfile
|
||||
file_template = anatomy.get_template_item(
|
||||
"work", template_key, "file"
|
||||
).template
|
||||
|
||||
workdir_data.update({
|
||||
"version": 1,
|
||||
"user": get_ayon_username(),
|
||||
"ext": extensions[0]
|
||||
})
|
||||
|
||||
last_workfile_path = get_last_workfile(
|
||||
workdir, file_template, workdir_data, extensions, True
|
||||
)
|
||||
|
||||
if not os.path.exists(last_workfile_path):
|
||||
log.debug((
|
||||
"Workfiles for launch context does not exists"
|
||||
" yet but path will be set."
|
||||
))
|
||||
log.debug(
|
||||
"Setting last workfile path: {}".format(last_workfile_path)
|
||||
)
|
||||
|
||||
data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path
|
||||
data["last_workfile_path"] = last_workfile_path
|
||||
3
server_addon/applications/package.py
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
name = "applications"
|
||||
title = "Applications"
|
||||
version = "0.2.0"
|
||||
|
|
@ -3,9 +3,9 @@ import json
|
|||
import copy
|
||||
|
||||
from ayon_server.addons import BaseServerAddon, AddonLibrary
|
||||
from ayon_server.entities.core import attribute_library
|
||||
from ayon_server.lib.postgres import Postgres
|
||||
|
||||
from .version import __version__
|
||||
from .settings import ApplicationsAddonSettings, DEFAULT_VALUES
|
||||
|
||||
try:
|
||||
|
|
@ -86,9 +86,6 @@ def get_enum_items_from_groups(groups):
|
|||
|
||||
|
||||
class ApplicationsAddon(BaseServerAddon):
|
||||
name = "applications"
|
||||
title = "Applications"
|
||||
version = __version__
|
||||
settings_model = ApplicationsAddonSettings
|
||||
|
||||
async def get_default_settings(self):
|
||||
|
|
@ -118,9 +115,28 @@ class ApplicationsAddon(BaseServerAddon):
|
|||
)
|
||||
|
||||
async def setup(self):
|
||||
need_restart = await self.create_applications_attribute()
|
||||
need_restart = await self.create_required_attributes()
|
||||
if need_restart:
|
||||
self.request_server_restart()
|
||||
await self._update_enums()
|
||||
|
||||
def _get_applications_def(self):
|
||||
return {
|
||||
"name": "applications",
|
||||
"type": "list_of_strings",
|
||||
"title": "Applications",
|
||||
"scope": ["project"],
|
||||
"enum":[],
|
||||
}
|
||||
|
||||
def _get_tools_def(self):
|
||||
return {
|
||||
"name": "tools",
|
||||
"type": "list_of_strings",
|
||||
"title": "Tools",
|
||||
"scope": ["project", "folder", "task"],
|
||||
"enum":[],
|
||||
}
|
||||
|
||||
async def create_applications_attribute(self) -> bool:
|
||||
"""Make sure there are required attributes which ftrack addon needs.
|
||||
|
|
@ -129,6 +145,73 @@ class ApplicationsAddon(BaseServerAddon):
|
|||
bool: 'True' if an attribute was created or updated.
|
||||
"""
|
||||
|
||||
need_restart = await self.create_required_attributes()
|
||||
await self._update_enums()
|
||||
return need_restart
|
||||
|
||||
async def create_required_attributes(self) -> bool:
|
||||
"""Make sure there are required 'applications' and 'tools' attributes.
|
||||
This only checks for the existence of the attributes; it does not populate
|
||||
them with any data. When an attribute is added, the server needs to be restarted,
|
||||
while adding enum data to the attribute does not require a restart.
|
||||
Returns:
|
||||
bool: 'True' if an attribute was created or updated.
|
||||
"""
|
||||
|
||||
# keep track of the last attribute position (for adding new attributes)
|
||||
apps_attribute_data = self._get_applications_def()
|
||||
tools_attribute_data = self._get_tools_def()
|
||||
|
||||
apps_attrib_name = apps_attribute_data["name"]
|
||||
tools_attrib_name = tools_attribute_data["name"]
|
||||
|
||||
async with Postgres.acquire() as conn, conn.transaction():
|
||||
query = "SELECT BOOL_OR(name = 'applications') AS has_applications, BOOL_OR(name = 'tools') AS has_tools FROM attributes;"
|
||||
result = (await conn.fetch(query))[0]
|
||||
|
||||
attributes_to_create = {}
|
||||
if not result["has_applications"]:
|
||||
attributes_to_create[apps_attrib_name] = {
|
||||
"scope": apps_attribute_data["scope"],
|
||||
"data": {
|
||||
"title": apps_attribute_data["title"],
|
||||
"type": apps_attribute_data["type"],
|
||||
"enum": [],
|
||||
}
|
||||
}
|
||||
|
||||
if not result["has_tools"]:
|
||||
attributes_to_create[tools_attrib_name] = {
|
||||
"scope": tools_attribute_data["scope"],
|
||||
"data": {
|
||||
"title": tools_attribute_data["title"],
|
||||
"type": tools_attribute_data["type"],
|
||||
"enum": [],
|
||||
},
|
||||
}
|
||||
|
||||
needs_restart = False
|
||||
# when any of the required attributes are not present, add them
|
||||
# and return 'True' to indicate that server needs to be restarted
|
||||
for name, payload in attributes_to_create.items():
|
||||
insert_query = "INSERT INTO attributes (name, scope, data, position) VALUES ($1, $2, $3, (SELECT COALESCE(MAX(position), 0) + 1 FROM attributes)) ON CONFLICT DO NOTHING"
|
||||
await conn.execute(
|
||||
insert_query,
|
||||
name,
|
||||
payload["scope"],
|
||||
payload["data"],
|
||||
)
|
||||
needs_restart = True
|
||||
|
||||
return needs_restart
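# --- Illustration (not part of the diff) ---
# Hedged example of the payload inserted for the 'applications' attribute
# when it is missing (values taken from '_get_applications_def' above):
#   name  = "applications"
#   scope = ["project"]
#   data  = {"title": "Applications", "type": "list_of_strings", "enum": []}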
|
||||
|
||||
async def _update_enums(self):
|
||||
"""Updates applications and tools enums based on the addon settings.
|
||||
This method is called when the addon is started (after we are sure that the
|
||||
'applications' and 'tools' attributes exist) and when the addon settings are
|
||||
updated (using the 'on_settings_changed' method).
|
||||
"""
|
||||
|
||||
instance = AddonLibrary.getinstance()
|
||||
app_defs = instance.data.get(self.name)
|
||||
all_applications = []
|
||||
|
|
@ -148,33 +231,32 @@ class ApplicationsAddon(BaseServerAddon):
|
|||
merge_groups(all_applications, app_groups)
|
||||
merge_groups(all_tools, studio_settings["tool_groups"])
|
||||
|
||||
query = "SELECT name, position, scope, data from public.attributes"
|
||||
|
||||
apps_attrib_name = "applications"
|
||||
tools_attrib_name = "tools"
|
||||
|
||||
apps_enum = get_enum_items_from_groups(all_applications)
|
||||
tools_enum = get_enum_items_from_groups(all_tools)
|
||||
|
||||
apps_attribute_data = {
|
||||
"type": "list_of_strings",
|
||||
"title": "Applications",
|
||||
"enum": apps_enum
|
||||
"enum": apps_enum,
|
||||
}
|
||||
tools_attribute_data = {
|
||||
"type": "list_of_strings",
|
||||
"title": "Tools",
|
||||
"enum": tools_enum
|
||||
"enum": tools_enum,
|
||||
}
|
||||
|
||||
apps_scope = ["project"]
|
||||
tools_scope = ["project", "folder", "task"]
|
||||
|
||||
apps_match_position = None
|
||||
apps_matches = False
|
||||
tools_match_position = None
|
||||
tools_matches = False
|
||||
position = 1
|
||||
async for row in Postgres.iterate(query):
|
||||
position += 1
|
||||
|
||||
async for row in Postgres.iterate(
|
||||
"SELECT name, position, scope, data from public.attributes"
|
||||
):
|
||||
if row["name"] == apps_attrib_name:
|
||||
# Check if scope is matching applications addon requirements
|
||||
if (
|
||||
|
|
@ -182,7 +264,6 @@ class ApplicationsAddon(BaseServerAddon):
|
|||
and row["data"].get("enum") == apps_enum
|
||||
):
|
||||
apps_matches = True
|
||||
apps_match_position = row["position"]
|
||||
|
||||
elif row["name"] == tools_attrib_name:
|
||||
if (
|
||||
|
|
@ -190,45 +271,41 @@ class ApplicationsAddon(BaseServerAddon):
|
|||
and row["data"].get("enum") == tools_enum
|
||||
):
|
||||
tools_matches = True
|
||||
tools_match_position = row["position"]
|
||||
|
||||
if apps_matches and tools_matches:
|
||||
return False
|
||||
return
|
||||
|
||||
postgre_query = "\n".join((
|
||||
"INSERT INTO public.attributes",
|
||||
" (name, position, scope, data)",
|
||||
"VALUES",
|
||||
" ($1, $2, $3, $4)",
|
||||
"ON CONFLICT (name)",
|
||||
"DO UPDATE SET",
|
||||
" scope = $3,",
|
||||
" data = $4",
|
||||
))
|
||||
if not apps_matches:
|
||||
# Reuse position from found attribute
|
||||
if apps_match_position is None:
|
||||
apps_match_position = position
|
||||
position += 1
|
||||
|
||||
await Postgres.execute(
|
||||
postgre_query,
|
||||
apps_attrib_name,
|
||||
apps_match_position,
|
||||
"""
|
||||
UPDATE attributes SET
|
||||
scope = $1,
|
||||
data = $2
|
||||
WHERE
|
||||
name = $3
|
||||
""",
|
||||
apps_scope,
|
||||
apps_attribute_data,
|
||||
apps_attrib_name,
|
||||
)
|
||||
|
||||
if not tools_matches:
|
||||
if tools_match_position is None:
|
||||
tools_match_position = position
|
||||
position += 1
|
||||
|
||||
await Postgres.execute(
|
||||
postgre_query,
|
||||
tools_attrib_name,
|
||||
tools_match_position,
|
||||
"""
|
||||
UPDATE attributes SET
|
||||
scope = $1,
|
||||
data = $2
|
||||
WHERE
|
||||
name = $3
|
||||
""",
|
||||
tools_scope,
|
||||
tools_attribute_data,
|
||||
tools_attrib_name,
|
||||
)
|
||||
return True
|
||||
|
||||
# Reset attributes cache on server
|
||||
await attribute_library.load()
|
||||
|
||||
async def on_settings_changed(self, *args, **kwargs):
|
||||
_ = args, kwargs
|
||||
await self._update_enums()
|
||||
|
|
|
|||
|
|
@ -7,6 +7,26 @@
|
|||
"host_name": "maya",
|
||||
"environment": "{\n \"MAYA_DISABLE_CLIC_IPM\": \"Yes\",\n \"MAYA_DISABLE_CIP\": \"Yes\",\n \"MAYA_DISABLE_CER\": \"Yes\",\n \"PYMEL_SKIP_MEL_INIT\": \"Yes\",\n \"LC_ALL\": \"C\"\n}\n",
|
||||
"variants": [
|
||||
{
|
||||
"name": "2025",
|
||||
"label": "2025",
|
||||
"executables": {
|
||||
"windows": [
|
||||
"C:\\Program Files\\Autodesk\\Maya2025\\bin\\maya.exe"
|
||||
],
|
||||
"darwin": ["/Applications/Autodesk/maya2025/Maya.app"],
|
||||
"linux": [
|
||||
"/usr/autodesk/maya2025/bin/maya"
|
||||
]
|
||||
},
|
||||
"arguments": {
|
||||
"windows": [],
|
||||
"darwin": [],
|
||||
"linux": []
|
||||
},
|
||||
"environment": "{\n \"MAYA_VERSION\": \"2025\"\n}",
|
||||
"use_python_2": false
|
||||
},
|
||||
{
|
||||
"name": "2024",
|
||||
"label": "2024",
|
||||
|
|
|
|||
|
|
@ -1 +0,0 @@
|
|||
__version__ = "0.1.8"
|
||||
|
|
@ -6,7 +6,7 @@ from ayon_server.settings import (
|
|||
|
||||
from .imageio import BlenderImageIOModel
|
||||
from .publish_plugins import (
|
||||
PublishPuginsModel,
|
||||
PublishPluginsModel,
|
||||
DEFAULT_BLENDER_PUBLISH_SETTINGS
|
||||
)
|
||||
from .render_settings import (
|
||||
|
|
@ -47,8 +47,8 @@ class BlenderSettings(BaseSettingsModel):
|
|||
default_factory=TemplateWorkfileBaseOptions,
|
||||
title="Workfile Builder"
|
||||
)
|
||||
publish: PublishPuginsModel = SettingsField(
|
||||
default_factory=PublishPuginsModel,
|
||||
publish: PublishPluginsModel = SettingsField(
|
||||
default_factory=PublishPluginsModel,
|
||||
title="Publish Plugins"
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -66,7 +66,7 @@ class ExtractPlayblastModel(BaseSettingsModel):
|
|||
return validate_json_dict(value)
|
||||
|
||||
|
||||
class PublishPuginsModel(BaseSettingsModel):
|
||||
class PublishPluginsModel(BaseSettingsModel):
|
||||
ValidateCameraZeroKeyframe: ValidatePluginModel = SettingsField(
|
||||
default_factory=ValidatePluginModel,
|
||||
title="Validate Camera Zero Keyframe",
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ class WorkfileModel(BaseSettingsModel):
|
|||
)
|
||||
|
||||
|
||||
class PublishPuginsModel(BaseSettingsModel):
|
||||
class PublishPluginsModel(BaseSettingsModel):
|
||||
CollectRenderPath: CollectRenderPathModel = SettingsField(
|
||||
default_factory=CollectRenderPathModel,
|
||||
title="Collect Render Path"
|
||||
|
|
@ -57,8 +57,8 @@ class CelActionSettings(BaseSettingsModel):
|
|||
workfile: WorkfileModel = SettingsField(
|
||||
title="Workfile"
|
||||
)
|
||||
publish: PublishPuginsModel = SettingsField(
|
||||
default_factory=PublishPuginsModel,
|
||||
publish: PublishPluginsModel = SettingsField(
|
||||
default_factory=PublishPluginsModel,
|
||||
title="Publish plugins",
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -4,6 +4,8 @@ import re
|
|||
import shutil
|
||||
import argparse
|
||||
import zipfile
|
||||
import types
|
||||
import importlib
|
||||
import platform
|
||||
import collections
|
||||
from pathlib import Path
|
||||
|
|
@ -44,6 +46,11 @@ version = "{addon_version}"
|
|||
plugin_for = ["ayon_server"]
|
||||
"""
|
||||
|
||||
CLIENT_VERSION_CONTENT = '''# -*- coding: utf-8 -*-
|
||||
"""Package declaring AYON core addon version."""
|
||||
__version__ = "{}"
|
||||
'''
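# --- Illustration (not part of the diff) ---
# For a hypothetical addon version "1.2.3" the template above renders a
# client 'version.py' containing:
#   # -*- coding: utf-8 -*-
#   """Package declaring AYON core addon version."""
#   __version__ = "1.2.3"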
|
||||
|
||||
|
||||
class ZipFileLongPaths(zipfile.ZipFile):
|
||||
"""Allows longer paths in zip files.
|
||||
|
|
@ -175,13 +182,75 @@ def create_addon_zip(
|
|||
shutil.rmtree(str(output_dir / addon_name))
|
||||
|
||||
|
||||
def prepare_client_code(
|
||||
addon_dir: Path,
|
||||
addon_output_dir: Path,
|
||||
addon_version: str
|
||||
):
|
||||
client_dir = addon_dir / "client"
|
||||
if not client_dir.exists():
|
||||
return
|
||||
|
||||
# Prepare private dir in output
|
||||
private_dir = addon_output_dir / "private"
|
||||
private_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Copy pyproject toml if available
|
||||
pyproject_toml = client_dir / "pyproject.toml"
|
||||
if pyproject_toml.exists():
|
||||
shutil.copy(pyproject_toml, private_dir)
|
||||
|
||||
for subpath in client_dir.iterdir():
|
||||
if subpath.name == "pyproject.toml":
|
||||
continue
|
||||
|
||||
if subpath.is_file():
|
||||
continue
|
||||
|
||||
# Update version.py with server version if 'version.py' is available
|
||||
version_path = subpath / "version.py"
|
||||
if version_path.exists():
|
||||
with open(version_path, "w") as stream:
|
||||
stream.write(CLIENT_VERSION_CONTENT.format(addon_version))
|
||||
|
||||
zip_filepath = private_dir / "client.zip"
|
||||
with ZipFileLongPaths(zip_filepath, "w", zipfile.ZIP_DEFLATED) as zipf:
|
||||
# Add client code content to zip
|
||||
for path, sub_path in find_files_in_subdir(str(subpath)):
|
||||
sub_path = os.path.join(subpath.name, sub_path)
|
||||
zipf.write(path, sub_path)
|
||||
|
||||
|
||||
def import_filepath(path: Path, module_name: Optional[str] = None):
|
||||
if not module_name:
|
||||
module_name = os.path.splitext(path.name)[0]
|
||||
|
||||
# Convert to string
|
||||
path = str(path)
|
||||
module = types.ModuleType(module_name)
|
||||
module.__file__ = path
|
||||
|
||||
# Use loader so module has full specs
|
||||
module_loader = importlib.machinery.SourceFileLoader(
|
||||
module_name, path
|
||||
)
|
||||
module_loader.exec_module(module)
|
||||
return module
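# --- Usage sketch (illustration only, not part of the diff) ---
# Hedged example: loading a hypothetical addon's 'package.py' to read the
# version it declares.
#   package = import_filepath(Path("server_addon/example_addon/package.py"))
#   print(package.name, package.version)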
|
||||
|
||||
|
||||
def create_addon_package(
|
||||
addon_dir: Path,
|
||||
output_dir: Path,
|
||||
create_zip: bool,
|
||||
keep_source: bool,
|
||||
):
|
||||
addon_version = get_addon_version(addon_dir)
|
||||
src_package_py = addon_dir / "package.py"
|
||||
package = None
|
||||
if src_package_py.exists():
|
||||
package = import_filepath(src_package_py)
|
||||
addon_version = package.version
|
||||
else:
|
||||
addon_version = get_addon_version(addon_dir)
|
||||
|
||||
addon_output_dir = output_dir / addon_dir.name / addon_version
|
||||
if addon_output_dir.exists():
|
||||
|
|
@ -189,22 +258,27 @@ def create_addon_package(
|
|||
addon_output_dir.mkdir(parents=True)
|
||||
|
||||
# Copy server content
|
||||
package_py = addon_output_dir / "package.py"
|
||||
addon_name = addon_dir.name
|
||||
if addon_name == "royal_render":
|
||||
addon_name = "royalrender"
|
||||
package_py_content = PACKAGE_PY_TEMPLATE.format(
|
||||
addon_name=addon_name, addon_version=addon_version
|
||||
)
|
||||
dst_package_py = addon_output_dir / "package.py"
|
||||
if package is not None:
|
||||
shutil.copy(src_package_py, dst_package_py)
|
||||
else:
|
||||
addon_name = addon_dir.name
|
||||
if addon_name == "royal_render":
|
||||
addon_name = "royalrender"
|
||||
package_py_content = PACKAGE_PY_TEMPLATE.format(
|
||||
addon_name=addon_name, addon_version=addon_version
|
||||
)
|
||||
|
||||
with open(package_py, "w+") as pkg_py:
|
||||
pkg_py.write(package_py_content)
|
||||
with open(dst_package_py, "w+") as pkg_py:
|
||||
pkg_py.write(package_py_content)
|
||||
|
||||
server_dir = addon_dir / "server"
|
||||
shutil.copytree(
|
||||
server_dir, addon_output_dir / "server", dirs_exist_ok=True
|
||||
)
|
||||
|
||||
prepare_client_code(addon_dir, addon_output_dir, addon_version)
|
||||
|
||||
if create_zip:
|
||||
create_addon_zip(
|
||||
output_dir, addon_dir.name, addon_version, keep_source
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
from typing import TYPE_CHECKING
|
||||
from pydantic import validator
|
||||
|
||||
from ayon_server.settings import (
|
||||
|
|
@ -5,6 +6,8 @@ from ayon_server.settings import (
|
|||
SettingsField,
|
||||
ensure_unique_names,
|
||||
)
|
||||
if TYPE_CHECKING:
|
||||
from ayon_server.addons import BaseServerAddon
|
||||
|
||||
from .publish_plugins import (
|
||||
PublishPluginsModel,
|
||||
|
|
@ -19,7 +22,7 @@ class ServerListSubmodel(BaseSettingsModel):
|
|||
|
||||
|
||||
async def defined_deadline_ws_name_enum_resolver(
|
||||
addon: "BaseServerAddon",
|
||||
addon: BaseServerAddon,
|
||||
settings_variant: str = "production",
|
||||
project_name: str | None = None,
|
||||
) -> list[str]:
|
||||
|
|
|
|||
|
|
@ -87,7 +87,7 @@ class CreateShotClipModel(BaseSettingsModel):
|
|||
)
|
||||
|
||||
|
||||
class CreatePuginsModel(BaseSettingsModel):
|
||||
class CreatePluginsModel(BaseSettingsModel):
|
||||
CreateShotClip: CreateShotClipModel = SettingsField(
|
||||
default_factory=CreateShotClipModel,
|
||||
title="Create Shot Clip"
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
from ayon_server.settings import BaseSettingsModel, SettingsField
|
||||
|
||||
from .imageio import FlameImageIOModel, DEFAULT_IMAGEIO_SETTINGS
|
||||
from .create_plugins import CreatePuginsModel, DEFAULT_CREATE_SETTINGS
|
||||
from .publish_plugins import PublishPuginsModel, DEFAULT_PUBLISH_SETTINGS
|
||||
from .create_plugins import CreatePluginsModel, DEFAULT_CREATE_SETTINGS
|
||||
from .publish_plugins import PublishPluginsModel, DEFAULT_PUBLISH_SETTINGS
|
||||
from .loader_plugins import LoaderPluginsModel, DEFAULT_LOADER_SETTINGS
|
||||
|
||||
|
||||
|
|
@ -11,12 +11,12 @@ class FlameSettings(BaseSettingsModel):
|
|||
default_factory=FlameImageIOModel,
|
||||
title="Color Management (ImageIO)"
|
||||
)
|
||||
create: CreatePuginsModel = SettingsField(
|
||||
default_factory=CreatePuginsModel,
|
||||
create: CreatePluginsModel = SettingsField(
|
||||
default_factory=CreatePluginsModel,
|
||||
title="Create plugins"
|
||||
)
|
||||
publish: PublishPuginsModel = SettingsField(
|
||||
default_factory=PublishPuginsModel,
|
||||
publish: PublishPluginsModel = SettingsField(
|
||||
default_factory=PublishPluginsModel,
|
||||
title="Publish plugins"
|
||||
)
|
||||
load: LoaderPluginsModel = SettingsField(
|
||||
|
|
|
|||
|
|
@ -121,7 +121,7 @@ class IntegrateBatchGroupModel(BaseSettingsModel):
|
|||
)
|
||||
|
||||
|
||||
class PublishPuginsModel(BaseSettingsModel):
|
||||
class PublishPluginsModel(BaseSettingsModel):
|
||||
CollectTimelineInstances: CollectTimelineInstancesModel = SettingsField(
|
||||
default_factory=CollectTimelineInstancesModel,
|
||||
title="Collect Timeline Instances"
|
||||
|
|
|
|||
|
|
@ -75,6 +75,12 @@ class HooksModel(BaseSettingsModel):
|
|||
default_factory=HookOptionalModel,
|
||||
title="Install PySide2"
|
||||
)
|
||||
FusionLaunchMenuHook: HookOptionalModel = SettingsField(
|
||||
default_factory=HookOptionalModel,
|
||||
title="Launch AYON Menu on Fusion Start",
|
||||
description="Launch the AYON menu on Fusion application startup. "
|
||||
"This is only supported for Fusion 18+"
|
||||
)
|
||||
|
||||
|
||||
class CreateSaverModel(CreateSaverPluginModel):
|
||||
|
|
@ -143,6 +149,9 @@ DEFAULT_VALUES = {
|
|||
"hooks": {
|
||||
"InstallPySideToFusion": {
|
||||
"enabled": True
|
||||
},
|
||||
"FusionLaunchMenuHook": {
|
||||
"enabled": False
|
||||
}
|
||||
},
|
||||
"create": {
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
__version__ = "0.1.4"
|
||||
__version__ = "0.1.5"
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ class LoadClipModel(BaseSettingsModel):
|
|||
)
|
||||
|
||||
|
||||
class LoaderPuginsModel(BaseSettingsModel):
|
||||
class LoaderPluginsModel(BaseSettingsModel):
|
||||
LoadClip: LoadClipModel = SettingsField(
|
||||
default_factory=LoadClipModel,
|
||||
title="Load Clip"
|
||||
|
|
|
|||
|
|
@ -9,11 +9,11 @@ from .create_plugins import (
|
|||
DEFAULT_CREATE_SETTINGS
|
||||
)
|
||||
from .loader_plugins import (
|
||||
LoaderPuginsModel,
|
||||
LoaderPluginsModel,
|
||||
DEFAULT_LOADER_PLUGINS_SETTINGS
|
||||
)
|
||||
from .publish_plugins import (
|
||||
PublishPuginsModel,
|
||||
PublishPluginsModel,
|
||||
DEFAULT_PUBLISH_PLUGIN_SETTINGS
|
||||
)
|
||||
from .scriptsmenu import (
|
||||
|
|
@ -35,12 +35,12 @@ class HieroSettings(BaseSettingsModel):
|
|||
default_factory=CreatorPluginsSettings,
|
||||
title="Creator Plugins",
|
||||
)
|
||||
load: LoaderPuginsModel = SettingsField(
|
||||
default_factory=LoaderPuginsModel,
|
||||
load: LoaderPluginsModel = SettingsField(
|
||||
default_factory=LoaderPluginsModel,
|
||||
title="Loader plugins"
|
||||
)
|
||||
publish: PublishPuginsModel = SettingsField(
|
||||
default_factory=PublishPuginsModel,
|
||||
publish: PublishPluginsModel = SettingsField(
|
||||
default_factory=PublishPluginsModel,
|
||||
title="Publish plugins"
|
||||
)
|
||||
scriptsmenu: ScriptsmenuSettings = SettingsField(
|
||||
|
|
|
|||
|
|
@ -49,7 +49,7 @@ class ExtractReviewCutUpVideoModel(BaseSettingsModel):
|
|||
)
|
||||
|
||||
|
||||
class PublishPuginsModel(BaseSettingsModel):
|
||||
class PublishPluginsModel(BaseSettingsModel):
|
||||
CollectInstanceVersion: CollectInstanceVersionModel = SettingsField(
|
||||
default_factory=CollectInstanceVersionModel,
|
||||
title="Collect Instance Version"
|
||||
|
|
|
|||
|
|
@ -1,5 +1,9 @@
|
|||
from ayon_server.settings import BaseSettingsModel, SettingsField
|
||||
from ayon_server.types import ColorRGB_float, ColorRGBA_uint8
|
||||
from ayon_server.types import ColorRGBA_uint8
|
||||
|
||||
|
||||
class LoaderEnabledModel(BaseSettingsModel):
|
||||
enabled: bool = SettingsField(title="Enabled")
|
||||
|
||||
|
||||
class ColorsSetting(BaseSettingsModel):
|
||||
|
|
@ -94,6 +98,7 @@ class ReferenceLoaderModel(BaseSettingsModel):
|
|||
|
||||
|
||||
class ImportLoaderModel(BaseSettingsModel):
|
||||
enabled: bool = SettingsField(title="Enabled")
|
||||
namespace: str = SettingsField(title="Namespace")
|
||||
group_name: str = SettingsField(title="Group name")
|
||||
|
||||
|
|
@ -113,6 +118,89 @@ class LoadersModel(BaseSettingsModel):
|
|||
title="Import Loader"
|
||||
)
|
||||
|
||||
# Enable/disable loaders
|
||||
ArnoldStandinLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Arnold Standin Loader"
|
||||
)
|
||||
AssemblyLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Assembly Loader"
|
||||
)
|
||||
AudioLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Audio Loader"
|
||||
)
|
||||
GpuCacheLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="GPU Cache Loader"
|
||||
)
|
||||
FileNodeLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="File Node (Image) Loader"
|
||||
)
|
||||
ImagePlaneLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Image Plane Loader"
|
||||
)
|
||||
LookLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Look Loader"
|
||||
)
|
||||
MatchmoveLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Matchmove Loader"
|
||||
)
|
||||
MultiverseUsdLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Multiverse USD Loader"
|
||||
)
|
||||
MultiverseUsdOverLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Multiverse USD Override Loader"
|
||||
)
|
||||
RedshiftProxyLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Redshift Proxy Loader"
|
||||
)
|
||||
RenderSetupLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Render Setup Loader"
|
||||
)
|
||||
LoadVDBtoArnold: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="VDB to Arnold Loader"
|
||||
)
|
||||
LoadVDBtoRedShift: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="VDB to Redshift Loader"
|
||||
)
|
||||
LoadVDBtoVRay: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="VDB to V-Ray Loader"
|
||||
)
|
||||
VRayProxyLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Vray Proxy Loader"
|
||||
)
|
||||
VRaySceneLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="VrayScene Loader"
|
||||
)
|
||||
XgenLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Xgen Loader"
|
||||
)
|
||||
YetiCacheLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Yeti Cache Loader"
|
||||
)
|
||||
YetiRigLoader: LoaderEnabledModel = SettingsField(
|
||||
default_factory=LoaderEnabledModel,
|
||||
title="Yeti Rig Loader"
|
||||
)
|
||||
|
||||
|
||||
DEFAULT_LOADERS_SETTING = {
|
||||
"colors": {
|
||||
"model": [209, 132, 30, 1.0],
|
||||
|
|
@ -154,8 +242,29 @@ DEFAULT_LOADERS_SETTING = {
|
|||
"display_handle": True
|
||||
},
|
||||
"import_loader": {
|
||||
"enabled": True,
|
||||
"namespace": "{folder[name]}_{product[name]}_##_",
|
||||
"group_name": "_GRP",
|
||||
"display_handle": True
|
||||
}
|
||||
},
|
||||
"ArnoldStandinLoader": {"enabled": True},
|
||||
"AssemblyLoader": {"enabled": True},
|
||||
"AudioLoader": {"enabled": True},
|
||||
"FileNodeLoader": {"enabled": True},
|
||||
"GpuCacheLoader": {"enabled": True},
|
||||
"ImagePlaneLoader": {"enabled": True},
|
||||
"LookLoader": {"enabled": True},
|
||||
"MatchmoveLoader": {"enabled": True},
|
||||
"MultiverseUsdLoader": {"enabled": True},
|
||||
"MultiverseUsdOverLoader": {"enabled": True},
|
||||
"RedshiftProxyLoader": {"enabled": True},
|
||||
"RenderSetupLoader": {"enabled": True},
|
||||
"LoadVDBtoArnold": {"enabled": True},
|
||||
"LoadVDBtoRedShift": {"enabled": True},
|
||||
"LoadVDBtoVRay": {"enabled": True},
|
||||
"VRayProxyLoader": {"enabled": True},
|
||||
"VRaySceneLoader": {"enabled": True},
|
||||
"XgenLoader": {"enabled": True},
|
||||
"YetiCacheLoader": {"enabled": True},
|
||||
"YetiRigLoader": {"enabled": True},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ from ayon_server.settings import (
|
|||
ensure_unique_names,
|
||||
task_types_enum,
|
||||
)
|
||||
from ayon_server.types import ColorRGBA_uint8, ColorRGB_float
|
||||
from ayon_server.types import ColorRGBA_uint8
|
||||
|
||||
|
||||
def hardware_falloff_enum():
|
||||
|
|
|
|||
|
|
@ -1,3 +1,7 @@
|
|||
import json
|
||||
|
||||
from pydantic import validator
|
||||
from ayon_server.exceptions import BadRequestException
|
||||
from ayon_server.settings import BaseSettingsModel, SettingsField
|
||||
|
||||
|
||||
|
|
@ -14,19 +18,60 @@ class ScriptsmenuSubmodel(BaseSettingsModel):
|
|||
)
|
||||
|
||||
|
||||
_definition_mode_type = [
|
||||
{"value": "definition", "label": "Menu Builder"},
|
||||
{"value": "definition_json", "label": "Raw JSON (advanced)"}
|
||||
]
|
||||
|
||||
|
||||
class ScriptsmenuModel(BaseSettingsModel):
|
||||
"""Add a custom scripts menu to Maya"""
|
||||
_isGroup = True
|
||||
|
||||
name: str = SettingsField(title="Menu Name")
|
||||
|
||||
definition_type: str = SettingsField(
|
||||
title="Define menu using",
|
||||
description="Choose the way to define the custom scripts menu "
|
||||
"via settings",
|
||||
enum_resolver=lambda: _definition_mode_type,
|
||||
conditionalEnum=True,
|
||||
default="definition"
|
||||
)
|
||||
definition: list[ScriptsmenuSubmodel] = SettingsField(
|
||||
default_factory=list,
|
||||
title="Menu Definition",
|
||||
description="Scriptmenu Items Definition"
|
||||
)
|
||||
definition_json: str = SettingsField(
|
||||
"[]", title="Menu Definition JSON", widget="textarea",
|
||||
description=(
|
||||
"Define the custom tools menu using a JSON list. "
|
||||
"For more details on the JSON format, see "
|
||||
"[here](https://github.com/Colorbleed/scriptsmenu?tab=readme-ov-file#configuration)." # noqa: E501
|
||||
)
|
||||
)
|
||||
|
||||
@validator("definition_json")
|
||||
def validate_json(cls, value):
|
||||
if not value.strip():
|
||||
return "[]"
|
||||
try:
|
||||
converted_value = json.loads(value)
|
||||
success = isinstance(converted_value, list)
|
||||
except json.JSONDecodeError:
|
||||
success = False
|
||||
|
||||
if not success:
|
||||
raise BadRequestException(
|
||||
"The definition can't be parsed as json list object"
|
||||
)
|
||||
return value
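# --- Illustration (not part of the diff) ---
# The validator above only requires the value to parse as a JSON list. A
# hedged example value (item keys follow the scriptsmenu documentation
# linked in the field description and are not verified here):
#   json.dumps([
#       {"type": "action", "title": "My Tool", "command": "print('hi')"}
#   ])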
|
||||
|
||||
|
||||
DEFAULT_SCRIPTSMENU_SETTINGS = {
|
||||
"name": "Custom Tools",
|
||||
"definition_type": "definition",
|
||||
"definition": [
|
||||
{
|
||||
"type": "action",
|
||||
|
|
@ -39,5 +84,6 @@ DEFAULT_SCRIPTSMENU_SETTINGS = {
|
|||
"shader"
|
||||
]
|
||||
}
|
||||
]
|
||||
],
|
||||
"definition_json": "[]"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,3 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Package declaring addon version."""
|
||||
__version__ = "0.1.13"
|
||||
__version__ = "0.1.15"
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ class LoadClipModel(BaseSettingsModel):
|
|||
)
|
||||
|
||||
|
||||
class LoaderPuginsModel(BaseSettingsModel):
|
||||
class LoaderPluginsModel(BaseSettingsModel):
|
||||
LoadImage: LoadImageModel = SettingsField(
|
||||
default_factory=LoadImageModel,
|
||||
title="Load Image"
|
||||
|
|
|
|||
|
|
@ -28,11 +28,11 @@ from .create_plugins import (
|
|||
DEFAULT_CREATE_SETTINGS
|
||||
)
|
||||
from .publish_plugins import (
|
||||
PublishPuginsModel,
|
||||
PublishPluginsModel,
|
||||
DEFAULT_PUBLISH_PLUGIN_SETTINGS
|
||||
)
|
||||
from .loader_plugins import (
|
||||
LoaderPuginsModel,
|
||||
LoaderPluginsModel,
|
||||
DEFAULT_LOADER_PLUGINS_SETTINGS
|
||||
)
|
||||
from .workfile_builder import (
|
||||
|
|
@ -75,13 +75,13 @@ class NukeSettings(BaseSettingsModel):
|
|||
title="Creator Plugins",
|
||||
)
|
||||
|
||||
publish: PublishPuginsModel = SettingsField(
|
||||
default_factory=PublishPuginsModel,
|
||||
publish: PublishPluginsModel = SettingsField(
|
||||
default_factory=PublishPluginsModel,
|
||||
title="Publish Plugins",
|
||||
)
|
||||
|
||||
load: LoaderPuginsModel = SettingsField(
|
||||
default_factory=LoaderPuginsModel,
|
||||
load: LoaderPluginsModel = SettingsField(
|
||||
default_factory=LoaderPluginsModel,
|
||||
title="Loader Plugins",
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -219,7 +219,7 @@ class IncrementScriptVersionModel(BaseSettingsModel):
|
|||
active: bool = SettingsField(title="Active")
|
||||
|
||||
|
||||
class PublishPuginsModel(BaseSettingsModel):
|
||||
class PublishPluginsModel(BaseSettingsModel):
|
||||
CollectInstanceData: CollectInstanceDataModel = SettingsField(
|
||||
title="Collect Instance Version",
|
||||
default_factory=CollectInstanceDataModel,
|
||||
|
|
|
|||
|
|
@ -69,7 +69,7 @@ class CreateShotClipModels(BaseSettingsModel):
|
|||
)
|
||||
|
||||
|
||||
class CreatorPuginsModel(BaseSettingsModel):
|
||||
class CreatorPluginsModel(BaseSettingsModel):
|
||||
CreateShotClip: CreateShotClipModels = SettingsField(
|
||||
default_factory=CreateShotClipModels,
|
||||
title="Create Shot Clip"
|
||||
|
|
@ -84,8 +84,8 @@ class ResolveSettings(BaseSettingsModel):
|
|||
default_factory=ResolveImageIOModel,
|
||||
title="Color Management (ImageIO)"
|
||||
)
|
||||
create: CreatorPuginsModel = SettingsField(
|
||||
default_factory=CreatorPuginsModel,
|
||||
create: CreatorPluginsModel = SettingsField(
|
||||
default_factory=CreatorPluginsModel,
|
||||
title="Creator plugins",
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -142,6 +142,7 @@ DEFAULT_SIMPLE_CREATORS = [
|
|||
"extensions": [
|
||||
".exr",
|
||||
".png",
|
||||
".dng",
|
||||
".dpx",
|
||||
".jpg",
|
||||
".tiff",
|
||||
|
|
@ -165,6 +166,7 @@ DEFAULT_SIMPLE_CREATORS = [
|
|||
"extensions": [
|
||||
".exr",
|
||||
".png",
|
||||
".dng",
|
||||
".dpx",
|
||||
".jpg",
|
||||
".jpeg",
|
||||
|
|
@ -215,6 +217,7 @@ DEFAULT_SIMPLE_CREATORS = [
|
|||
".exr",
|
||||
".jpg",
|
||||
".jpeg",
|
||||
".dng",
|
||||
".dpx",
|
||||
".bmp",
|
||||
".tif",
|
||||
|
|
|
|||
|
|
@ -1,3 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Package declaring addon version."""
|
||||
__version__ = "0.1.3"
|
||||
__version__ = "0.1.4"
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
from ayon_server.settings import BaseSettingsModel, SettingsField
|
||||
from ayon_server.types import ColorRGBA_uint8, ColorRGB_uint8
|
||||
from ayon_server.types import ColorRGBA_uint8
|
||||
|
||||
|
||||
class CollectRenderInstancesModel(BaseSettingsModel):
|
||||
|
|
|
|||