diff --git a/client/ayon_core/addon/base.py b/client/ayon_core/addon/base.py index 6bac25b8ac..6ef838652e 100644 --- a/client/ayon_core/addon/base.py +++ b/client/ayon_core/addon/base.py @@ -16,6 +16,7 @@ import six import appdirs import ayon_api +from ayon_core import AYON_CORE_ROOT from ayon_core.lib import Logger, is_dev_mode_enabled from ayon_core.settings import get_studio_settings @@ -335,14 +336,70 @@ def _load_ayon_addons(openpype_modules, modules_key, log): return addons_to_skip_in_core +def _load_ayon_core_addons_dir( + ignore_addon_names, openpype_modules, modules_key, log +): + addons_dir = os.path.join(AYON_CORE_ROOT, "addons") + if not os.path.exists(addons_dir): + return + + imported_modules = [] + + # Make sure that addons which already have client code are not loaded + # from core again, with older code + filtered_paths = [] + for name in os.listdir(addons_dir): + if name in ignore_addon_names: + continue + path = os.path.join(addons_dir, name) + if os.path.isdir(path): + filtered_paths.append(path) + + for path in filtered_paths: + while path in sys.path: + sys.path.remove(path) + sys.path.insert(0, path) + + for name in os.listdir(path): + fullpath = os.path.join(path, name) + if os.path.isfile(fullpath): + basename, ext = os.path.splitext(name) + if ext != ".py": + continue + else: + basename = name + try: + module = __import__(basename, fromlist=("",)) + for attr_name in dir(module): + attr = getattr(module, attr_name) + if ( + inspect.isclass(attr) + and issubclass(attr, AYONAddon) + ): + new_import_str = "{}.{}".format(modules_key, basename) + sys.modules[new_import_str] = module + setattr(openpype_modules, basename, module) + imported_modules.append(module) + break + + except Exception: + log.error( + "Failed to import addon '{}'.".format(fullpath), + exc_info=True + ) + return imported_modules + + def _load_addons_in_core( ignore_addon_names, openpype_modules, modules_key, log ): + _load_ayon_core_addons_dir( + ignore_addon_names, openpype_modules, modules_key, log + ) # Add current directory at first place # - has small differences in import logic - current_dir = os.path.abspath(os.path.dirname(__file__)) - hosts_dir = os.path.join(os.path.dirname(current_dir), "hosts") - modules_dir = os.path.join(os.path.dirname(current_dir), "modules") + hosts_dir = os.path.join(AYON_CORE_ROOT, "hosts") + modules_dir = os.path.join(AYON_CORE_ROOT, "modules") ignored_host_names = set(IGNORED_HOSTS_IN_AYON) ignored_module_dir_filenames = ( diff --git a/client/ayon_core/addons/applications/ayon_applications/__init__.py b/client/ayon_core/addons/applications/ayon_applications/__init__.py new file mode 100644 index 0000000000..b4a50279ab --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/__init__.py @@ -0,0 +1,58 @@ +from .constants import ( + APPLICATIONS_ADDON_ROOT, + DEFAULT_ENV_SUBGROUP, + PLATFORM_NAMES, +) +from .exceptions import ( + ApplicationNotFound, + ApplicationExecutableNotFound, + ApplicationLaunchFailed, + MissingRequiredKey, +) +from .defs import ( + LaunchTypes, + ApplicationExecutable, + UndefinedApplicationExecutable, + ApplicationGroup, + Application, + EnvironmentToolGroup, + EnvironmentTool, +) +from .hooks import ( + LaunchHook, + PreLaunchHook, + PostLaunchHook, +) +from .manager import ( + ApplicationManager, + ApplicationLaunchContext, +) +from .addon import ApplicationsAddon + + +__all__ = ( + "DEFAULT_ENV_SUBGROUP", + "PLATFORM_NAMES", + + "ApplicationNotFound", + "ApplicationExecutableNotFound", + "ApplicationLaunchFailed", + 
"MissingRequiredKey", + + "LaunchTypes", + "ApplicationExecutable", + "UndefinedApplicationExecutable", + "ApplicationGroup", + "Application", + "EnvironmentToolGroup", + "EnvironmentTool", + + "LaunchHook", + "PreLaunchHook", + "PostLaunchHook", + + "ApplicationManager", + "ApplicationLaunchContext", + + "ApplicationsAddon", +) diff --git a/client/ayon_core/addons/applications/ayon_applications/addon.py b/client/ayon_core/addons/applications/ayon_applications/addon.py new file mode 100644 index 0000000000..0f1b68af0e --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/addon.py @@ -0,0 +1,173 @@ +import os +import json + +from ayon_core.addon import AYONAddon, IPluginPaths, click_wrap + +from .constants import APPLICATIONS_ADDON_ROOT +from .defs import LaunchTypes +from .manager import ApplicationManager + + +class ApplicationsAddon(AYONAddon, IPluginPaths): + name = "applications" + + def initialize(self, settings): + # TODO remove when addon is removed from ayon-core + self.enabled = self.name in settings + + def get_app_environments_for_context( + self, + project_name, + folder_path, + task_name, + full_app_name, + env_group=None, + launch_type=None, + env=None, + ): + """Calculate environment variables for launch context. + + Args: + project_name (str): Project name. + folder_path (str): Folder path. + task_name (str): Task name. + full_app_name (str): Full application name. + env_group (Optional[str]): Environment group. + launch_type (Optional[str]): Launch type. + env (Optional[dict[str, str]]): Environment variables to update. + + Returns: + dict[str, str]: Environment variables for context. + + """ + from ayon_applications.utils import get_app_environments_for_context + + if not full_app_name: + return {} + + return get_app_environments_for_context( + project_name, + folder_path, + task_name, + full_app_name, + env_group=env_group, + launch_type=launch_type, + env=env, + addons_manager=self.manager + ) + + def get_farm_publish_environment_variables( + self, + project_name, + folder_path, + task_name, + full_app_name=None, + env_group=None, + ): + """Calculate environment variables for farm publish. + + Args: + project_name (str): Project name. + folder_path (str): Folder path. + task_name (str): Task name. + env_group (Optional[str]): Environment group. + full_app_name (Optional[str]): Full application name. Value from + environment variable 'AYON_APP_NAME' is used if 'None' is + passed. + + Returns: + dict[str, str]: Environment variables for farm publish. + + """ + if full_app_name is None: + full_app_name = os.getenv("AYON_APP_NAME") + + return self.get_app_environments_for_context( + project_name, + folder_path, + task_name, + full_app_name, + env_group=env_group, + launch_type=LaunchTypes.farm_publish + ) + + def get_applications_manager(self, settings=None): + """Get applications manager. + + Args: + settings (Optional[dict]): Studio/project settings. + + Returns: + ApplicationManager: Applications manager. 
+ + """ + return ApplicationManager(settings) + + def get_plugin_paths(self): + return { + "publish": [ + os.path.join(APPLICATIONS_ADDON_ROOT, "plugins", "publish") + ] + } + + # --- CLI --- + def cli(self, addon_click_group): + main_group = click_wrap.group( + self._cli_main, name=self.name, help="Applications addon" + ) + ( + main_group.command( + self._cli_extract_environments, + name="extractenvironments", + help=( + "Extract environment variables for context into json file" + ) + ) + .argument("output_json_path") + .option("--project", help="Project name", default=None) + .option("--folder", help="Folder path", default=None) + .option("--task", help="Task name", default=None) + .option("--app", help="Application name", default=None) + .option( + "--envgroup", + help="Environment group (e.g. \"farm\")", + default=None + ) + ) + # Convert main command to click object and add it to parent group + addon_click_group.add_command( + main_group.to_click_obj() + ) + + def _cli_main(self): + pass + + def _cli_extract_environments( + self, output_json_path, project, folder, task, app, envgroup + ): + """Produces json file with environment based on project and app. + + Called by farm integration to propagate environment into farm jobs. + + Args: + output_json_path (str): Output json file path. + project (str): Project name. + folder (str): Folder path. + task (str): Task name. + app (str): Full application name e.g. 'maya/2024'. + envgroup (str): Environment group. + + """ + if all((project, folder, task, app)): + env = self.get_farm_publish_environment_variables( + project, folder, task, app, env_group=envgroup, + ) + else: + env = os.environ.copy() + + output_dir = os.path.dirname(output_json_path) + if not os.path.exists(output_dir): + os.makedirs(output_dir) + + with open(output_json_path, "w") as file_stream: + json.dump(env, file_stream, indent=4) diff --git a/client/ayon_core/addons/applications/ayon_applications/constants.py b/client/ayon_core/addons/applications/ayon_applications/constants.py new file mode 100644 index 0000000000..92c8f4f254 --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/constants.py @@ -0,0 +1,6 @@ +import os + +APPLICATIONS_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__)) + +PLATFORM_NAMES = {"windows", "linux", "darwin"} +DEFAULT_ENV_SUBGROUP = "standard" diff --git a/client/ayon_core/addons/applications/ayon_applications/defs.py b/client/ayon_core/addons/applications/ayon_applications/defs.py new file mode 100644 index 0000000000..5cc36041a1 --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/defs.py @@ -0,0 +1,404 @@ +import os +import platform +import json +import copy + +from ayon_core.lib import find_executable + + +class LaunchTypes: + """Launch types are filters for pre/post-launch hooks. + + Please use these variables in case they'll change values. 
+ """ + + # Local launch - application is launched on local machine + local = "local" + # Farm render job - application is on farm + farm_render = "farm-render" + # Farm publish job - integration post-render job + farm_publish = "farm-publish" + # Remote launch - application is launched on remote machine from which + # can be started publishing + remote = "remote" + # Automated launch - application is launched with automated publishing + automated = "automated" + + +class ApplicationExecutable: + """Representation of executable loaded from settings.""" + + def __init__(self, executable): + # Try to format executable with environments + try: + executable = executable.format(**os.environ) + except Exception: + pass + + # On MacOS check if exists path to executable when ends with `.app` + # - it is common that path will lead to "/Applications/Blender" but + # real path is "/Applications/Blender.app" + if platform.system().lower() == "darwin": + executable = self.macos_executable_prep(executable) + + self.executable_path = executable + + def __str__(self): + return self.executable_path + + def __repr__(self): + return "<{}> {}".format(self.__class__.__name__, self.executable_path) + + @staticmethod + def macos_executable_prep(executable): + """Try to find full path to executable file. + + Real executable is stored in '*.app/Contents/MacOS/'. + + Having path to '*.app' gives ability to read it's plist info and + use "CFBundleExecutable" key from plist to know what is "executable." + + Plist is stored in '*.app/Contents/Info.plist'. + + This is because some '*.app' directories don't have same permissions + as real executable. + """ + # Try to find if there is `.app` file + if not os.path.exists(executable): + _executable = executable + ".app" + if os.path.exists(_executable): + executable = _executable + + # Try to find real executable if executable has `Contents` subfolder + contents_dir = os.path.join(executable, "Contents") + if os.path.exists(contents_dir): + executable_filename = None + # Load plist file and check for bundle executable + plist_filepath = os.path.join(contents_dir, "Info.plist") + if os.path.exists(plist_filepath): + import plistlib + + if hasattr(plistlib, "load"): + with open(plist_filepath, "rb") as stream: + parsed_plist = plistlib.load(stream) + else: + parsed_plist = plistlib.readPlist(plist_filepath) + executable_filename = parsed_plist.get("CFBundleExecutable") + + if executable_filename: + executable = os.path.join( + contents_dir, "MacOS", executable_filename + ) + + return executable + + def as_args(self): + return [self.executable_path] + + def _realpath(self): + """Check if path is valid executable path.""" + # Check for executable in PATH + result = find_executable(self.executable_path) + if result is not None: + return result + + # This is not 100% validation but it is better than remove ability to + # launch .bat, .sh or extentionless files + if os.path.exists(self.executable_path): + return self.executable_path + return None + + def exists(self): + if not self.executable_path: + return False + return bool(self._realpath()) + + +class UndefinedApplicationExecutable(ApplicationExecutable): + """Some applications do not require executable path from settings. + + In that case this class is used to "fake" existing executable. 
+ """ + def __init__(self): + pass + + def __str__(self): + return self.__class__.__name__ + + def __repr__(self): + return "<{}>".format(self.__class__.__name__) + + def as_args(self): + return [] + + def exists(self): + return True + + +class ApplicationGroup: + """Hold information about application group. + + Application group wraps different versions(variants) of application. + e.g. "maya" is group and "maya_2020" is variant. + + Group hold `host_name` which is implementation name used in AYON. Also + holds `enabled` if whole app group is enabled or `icon` for application + icon path in resources. + + Group has also `environment` which hold same environments for all variants. + + Args: + name (str): Groups' name. + data (dict): Group defying data loaded from settings. + manager (ApplicationManager): Manager that created the group. + """ + + def __init__(self, name, data, manager): + self.name = name + self.manager = manager + self._data = data + + self.enabled = data["enabled"] + self.label = data["label"] or None + self.icon = data["icon"] or None + env = {} + try: + env = json.loads(data["environment"]) + except Exception: + pass + self._environment = env + + host_name = data["host_name"] or None + self.is_host = host_name is not None + self.host_name = host_name + + settings_variants = data["variants"] + variants = {} + for variant_data in settings_variants: + app_variant = Application(variant_data, self) + variants[app_variant.name] = app_variant + + self.variants = variants + + def __repr__(self): + return "<{}> - {}".format(self.__class__.__name__, self.name) + + def __iter__(self): + for variant in self.variants.values(): + yield variant + + @property + def environment(self): + return copy.deepcopy(self._environment) + + +class Application: + """Hold information about application. + + Object by itself does nothing special. + + Args: + data (dict): Data for the version containing information about + executables, variant label or if is enabled. + Only required key is `executables`. + group (ApplicationGroup): App group object that created the application + and under which application belongs. 
+ + """ + def __init__(self, data, group): + self._data = data + name = data["name"] + label = data["label"] or name + enabled = False + if group.enabled: + enabled = data.get("enabled", True) + + if group.label: + full_label = " ".join((group.label, label)) + else: + full_label = label + env = {} + try: + env = json.loads(data["environment"]) + except Exception: + pass + + arguments = data["arguments"] + if isinstance(arguments, dict): + arguments = arguments.get(platform.system().lower()) + + if not arguments: + arguments = [] + + _executables = data["executables"].get(platform.system().lower(), []) + executables = [ + ApplicationExecutable(executable) + for executable in _executables + ] + + self.group = group + + self.name = name + self.label = label + self.enabled = enabled + self.use_python_2 = data.get("use_python_2", False) + + self.full_name = "/".join((group.name, name)) + self.full_label = full_label + self.arguments = arguments + self.executables = executables + self._environment = env + + def __repr__(self): + return "<{}> - {}".format(self.__class__.__name__, self.full_name) + + @property + def environment(self): + return copy.deepcopy(self._environment) + + @property + def manager(self): + return self.group.manager + + @property + def host_name(self): + return self.group.host_name + + @property + def icon(self): + return self.group.icon + + @property + def is_host(self): + return self.group.is_host + + def find_executable(self): + """Try to find existing executable for application. + + Returns (str): Path to executable from `executables` or None if any + exists. + """ + for executable in self.executables: + if executable.exists(): + return executable + return None + + def launch(self, *args, **kwargs): + """Launch the application. + + For this purpose is used manager's launch method to keep logic at one + place. + + Arguments must match with manager's launch method. That's why *args + **kwargs are used. + + Returns: + subprocess.Popen: Return executed process as Popen object. + """ + return self.manager.launch(self.full_name, *args, **kwargs) + + +class EnvironmentToolGroup: + """Hold information about environment tool group. + + Environment tool group may hold different variants of same tool and set + environments that are same for all of them. + + e.g. "mtoa" may have different versions but all environments except one + are same. + + Args: + data (dict): Group information with variants. + manager (ApplicationManager): Manager that creates the group. + """ + + def __init__(self, data, manager): + name = data["name"] + label = data["label"] + + self.name = name + self.label = label + self._data = data + self.manager = manager + + environment = {} + try: + environment = json.loads(data["environment"]) + except Exception: + pass + self._environment = environment + + variants = data.get("variants") or [] + variants_by_name = {} + for variant_data in variants: + tool = EnvironmentTool(variant_data, self) + variants_by_name[tool.name] = tool + self.variants = variants_by_name + + def __repr__(self): + return "<{}> - {}".format(self.__class__.__name__, self.name) + + def __iter__(self): + for variant in self.variants.values(): + yield variant + + @property + def environment(self): + return copy.deepcopy(self._environment) + + +class EnvironmentTool: + """Hold information about application tool. + + Structure of tool information. + + Args: + variant_data (dict): Variant data with environments and + host and app variant filters. 
+ group (EnvironmentToolGroup): Name of group which wraps tool. + """ + + def __init__(self, variant_data, group): + # Backwards compatibility 3.9.1 - 3.9.2 + # - 'variant_data' contained only environments but contain also host + # and application variant filters + name = variant_data["name"] + label = variant_data["label"] + host_names = variant_data["host_names"] + app_variants = variant_data["app_variants"] + + environment = {} + try: + environment = json.loads(variant_data["environment"]) + except Exception: + pass + + self.host_names = host_names + self.app_variants = app_variants + self.name = name + self.variant_label = label + self.label = " ".join((group.label, label)) + self.group = group + + self._environment = environment + self.full_name = "/".join((group.name, name)) + + def __repr__(self): + return "<{}> - {}".format(self.__class__.__name__, self.full_name) + + @property + def environment(self): + return copy.deepcopy(self._environment) + + def is_valid_for_app(self, app): + """Is tool valid for application. + + Args: + app (Application): Application for which are prepared environments. + """ + if self.app_variants and app.full_name not in self.app_variants: + return False + + if self.host_names and app.host_name not in self.host_names: + return False + return True diff --git a/client/ayon_core/addons/applications/ayon_applications/exceptions.py b/client/ayon_core/addons/applications/ayon_applications/exceptions.py new file mode 100644 index 0000000000..d5a48d3b6b --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/exceptions.py @@ -0,0 +1,50 @@ +class ApplicationNotFound(Exception): + """Application was not found in ApplicationManager by name.""" + + def __init__(self, app_name): + self.app_name = app_name + super(ApplicationNotFound, self).__init__( + "Application \"{}\" was not found.".format(app_name) + ) + + +class ApplicationExecutableNotFound(Exception): + """Defined executable paths are not available on the machine.""" + + def __init__(self, application): + self.application = application + details = None + if not application.executables: + msg = ( + "Executable paths for application \"{}\"({}) are not set." + ) + else: + msg = ( + "Defined executable paths for application \"{}\"({})" + " are not available on this machine." + ) + details = "Defined paths:" + for executable in application.executables: + details += "\n- " + executable.executable_path + + self.msg = msg.format(application.full_label, application.full_name) + self.details = details + + exc_mgs = str(self.msg) + if details: + # Is good idea to pass new line symbol to exception message? + exc_mgs += "\n" + details + self.exc_msg = exc_mgs + super(ApplicationExecutableNotFound, self).__init__(exc_mgs) + + +class ApplicationLaunchFailed(Exception): + """Application launch failed due to known reason. + + Message should be self explanatory as traceback won't be shown. 
+ """ + pass + + +class MissingRequiredKey(KeyError): + pass diff --git a/client/ayon_core/addons/applications/ayon_applications/hooks.py b/client/ayon_core/addons/applications/ayon_applications/hooks.py new file mode 100644 index 0000000000..6aa12a210a --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/hooks.py @@ -0,0 +1,150 @@ +import platform +from abc import ABCMeta, abstractmethod + +import six + +from ayon_core.lib import Logger + +from .defs import LaunchTypes + + +@six.add_metaclass(ABCMeta) +class LaunchHook: + """Abstract base class of launch hook.""" + # Order of prelaunch hook, will be executed as last if set to None. + order = None + # List of host implementations, skipped if empty. + hosts = set() + # Set of application groups + app_groups = set() + # Set of specific application names + app_names = set() + # Set of platform availability + platforms = set() + # Set of launch types for which is available + # - if empty then is available for all launch types + # - by default has 'local' which is most common reason for launc hooks + launch_types = {LaunchTypes.local} + + def __init__(self, launch_context): + """Constructor of launch hook. + + Always should be called + """ + self.log = Logger.get_logger(self.__class__.__name__) + + self.launch_context = launch_context + + is_valid = self.class_validation(launch_context) + if is_valid: + is_valid = self.validate() + + self.is_valid = is_valid + + @classmethod + def class_validation(cls, launch_context): + """Validation of class attributes by launch context. + + Args: + launch_context (ApplicationLaunchContext): Context of launching + application. + + Returns: + bool: Is launch hook valid for the context by class attributes. + """ + if cls.platforms: + low_platforms = tuple( + _platform.lower() + for _platform in cls.platforms + ) + if platform.system().lower() not in low_platforms: + return False + + if cls.hosts: + if launch_context.host_name not in cls.hosts: + return False + + if cls.app_groups: + if launch_context.app_group.name not in cls.app_groups: + return False + + if cls.app_names: + if launch_context.app_name not in cls.app_names: + return False + + if cls.launch_types: + if launch_context.launch_type not in cls.launch_types: + return False + + return True + + @property + def data(self): + return self.launch_context.data + + @property + def application(self): + return getattr(self.launch_context, "application", None) + + @property + def manager(self): + return getattr(self.application, "manager", None) + + @property + def host_name(self): + return getattr(self.application, "host_name", None) + + @property + def app_group(self): + return getattr(self.application, "group", None) + + @property + def app_name(self): + return getattr(self.application, "full_name", None) + + @property + def addons_manager(self): + return getattr(self.launch_context, "addons_manager", None) + + @property + def modules_manager(self): + """ + Deprecated: + Use 'addons_wrapper' instead. + """ + return self.addons_manager + + def validate(self): + """Optional validation of launch hook on initialization. + + Returns: + bool: Hook is valid (True) or invalid (False). + """ + # QUESTION Not sure if this method has any usable potential. + # - maybe result can be based on settings + return True + + @abstractmethod + def execute(self, *args, **kwargs): + """Abstract execute method where logic of hook is.""" + pass + + +class PreLaunchHook(LaunchHook): + """Abstract class of prelaunch hook. 
+ + This launch hook will be processed before application is launched. + + If any exception will happen during processing the application won't be + launched. + """ + + +class PostLaunchHook(LaunchHook): + """Abstract class of postlaunch hook. + + This launch hook will be processed after application is launched. + + Nothing will happen if any exception will happen during processing. And + processing of other postlaunch hooks won't stop either. + """ diff --git a/client/ayon_core/addons/applications/ayon_applications/manager.py b/client/ayon_core/addons/applications/ayon_applications/manager.py new file mode 100644 index 0000000000..dca2ff4491 --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/manager.py @@ -0,0 +1,676 @@ +import os +import sys +import copy +import json +import tempfile +import platform +import inspect +import subprocess + +import six + +from ayon_core import AYON_CORE_ROOT +from ayon_core.settings import get_studio_settings +from ayon_core.lib import ( + Logger, + modules_from_path, + classes_from_module, + get_linux_launcher_args, +) +from ayon_core.addon import AddonsManager + +from .constants import DEFAULT_ENV_SUBGROUP +from .exceptions import ( + ApplicationNotFound, + ApplicationExecutableNotFound, +) +from .hooks import PostLaunchHook, PreLaunchHook +from .defs import EnvironmentToolGroup, ApplicationGroup, LaunchTypes + + +class ApplicationManager: + """Load applications and tools and store them by their full name. + + Args: + studio_settings (dict): Preloaded studio settings. When passed manager + will always use these values. Gives ability to create manager + using different settings. + """ + + def __init__(self, studio_settings=None): + self.log = Logger.get_logger(self.__class__.__name__) + + self.app_groups = {} + self.applications = {} + self.tool_groups = {} + self.tools = {} + + self._studio_settings = studio_settings + + self.refresh() + + def set_studio_settings(self, studio_settings): + """Ability to change init system settings. + + This will trigger refresh of manager. + """ + self._studio_settings = studio_settings + + self.refresh() + + def refresh(self): + """Refresh applications from settings.""" + self.app_groups.clear() + self.applications.clear() + self.tool_groups.clear() + self.tools.clear() + + if self._studio_settings is not None: + settings = copy.deepcopy(self._studio_settings) + else: + settings = get_studio_settings( + clear_metadata=False, exclude_locals=False + ) + + applications_addon_settings = settings["applications"] + + # Prepare known applications + app_defs = applications_addon_settings["applications"] + additional_apps = app_defs.pop("additional_apps") + for additional_app in additional_apps: + app_name = additional_app.pop("name") + if app_name in app_defs: + self.log.warning(( + "Additional application '{}' is already" + " in built-in applications." 
+ ).format(app_name)) + app_defs[app_name] = additional_app + + for group_name, variant_defs in app_defs.items(): + group = ApplicationGroup(group_name, variant_defs, self) + self.app_groups[group_name] = group + for app in group: + self.applications[app.full_name] = app + + tools_definitions = applications_addon_settings["tool_groups"] + for tool_group_data in tools_definitions: + group = EnvironmentToolGroup(tool_group_data, self) + self.tool_groups[group.name] = group + for tool in group: + self.tools[tool.full_name] = tool + + def find_latest_available_variant_for_group(self, group_name): + group = self.app_groups.get(group_name) + if group is None or not group.enabled: + return None + + output = None + for _, variant in reversed(sorted(group.variants.items())): + executable = variant.find_executable() + if executable: + output = variant + break + return output + + def create_launch_context(self, app_name, **data): + """Prepare launch context for application. + + Args: + app_name (str): Name of application that should be launched. + **data (Any): Any additional data. Data may be used during + + Returns: + ApplicationLaunchContext: Launch context for application. + + Raises: + ApplicationNotFound: Application was not found by entered name. + """ + + app = self.applications.get(app_name) + if not app: + raise ApplicationNotFound(app_name) + + executable = app.find_executable() + + return ApplicationLaunchContext( + app, executable, **data + ) + + def launch_with_context(self, launch_context): + """Launch application using existing launch context. + + Args: + launch_context (ApplicationLaunchContext): Prepared launch + context. + """ + + if not launch_context.executable: + raise ApplicationExecutableNotFound(launch_context.application) + return launch_context.launch() + + def launch(self, app_name, **data): + """Launch procedure. + + For host application it's expected to contain "project_name", + "folder_path" and "task_name". + + Args: + app_name (str): Name of application that should be launched. + **data (dict): Any additional data. Data may be used during + preparation to store objects usable in multiple places. + + Raises: + ApplicationNotFound: Application was not found by entered + argument `app_name`. + ApplicationExecutableNotFound: Executables in application definition + were not found on this machine. + ApplicationLaunchFailed: Something important for application launch + failed. Exception should contain explanation message, + traceback should not be needed. + """ + + context = self.create_launch_context(app_name, **data) + return self.launch_with_context(context) + + +class ApplicationLaunchContext: + """Context of launching application. + + Main purpose of context is to prepare launch arguments and keyword + arguments for new process. Most important part of keyword arguments + preparations are environment variables. + + During the whole process is possible to use `data` attribute to store + object usable in multiple places. + + Launch arguments are strings in list. It is possible to "chain" argument + when order of them matters. That is possible to do with adding list where + order is right and should not change. + NOTE: This is recommendation, not requirement. + e.g.: `["nuke.exe", "--NukeX"]` -> In this case any part of process may + insert argument between `nuke.exe` and `--NukeX`. To keep them together + it is better to wrap them in another list: `[["nuke.exe", "--NukeX"]]`. + + Notes: + It is possible to use launch context only to prepare environment + variables. 
In that case `executable` may be None and can be used + 'run_prelaunch_hooks' method to run prelaunch hooks which prepare + them. + + Args: + application (Application): Application definition. + executable (ApplicationExecutable): Object with path to executable. + env_group (Optional[str]): Environment variable group. If not set + 'DEFAULT_ENV_SUBGROUP' is used. + launch_type (Optional[str]): Launch type. If not set 'local' is used. + **data (dict): Any additional data. Data may be used during + preparation to store objects usable in multiple places. + """ + + def __init__( + self, + application, + executable, + env_group=None, + launch_type=None, + **data + ): + # Application object + self.application = application + + self.addons_manager = AddonsManager() + + # Logger + logger_name = "{}-{}".format(self.__class__.__name__, + self.application.full_name) + self.log = Logger.get_logger(logger_name) + + self.executable = executable + + if launch_type is None: + launch_type = LaunchTypes.local + self.launch_type = launch_type + + if env_group is None: + env_group = DEFAULT_ENV_SUBGROUP + + self.env_group = env_group + + self.data = dict(data) + + launch_args = [] + if executable is not None: + launch_args = executable.as_args() + # subprocess.Popen launch arguments (first argument in constructor) + self.launch_args = launch_args + self.launch_args.extend(application.arguments) + if self.data.get("app_args"): + self.launch_args.extend(self.data.pop("app_args")) + + # Handle launch environemtns + src_env = self.data.pop("env", None) + if src_env is not None and not isinstance(src_env, dict): + self.log.warning(( + "Passed `env` kwarg has invalid type: {}. Expected: `dict`." + " Using `os.environ` instead." + ).format(str(type(src_env)))) + src_env = None + + if src_env is None: + src_env = os.environ + + ignored_env = {"QT_API", } + env = { + key: str(value) + for key, value in src_env.items() + if key not in ignored_env + } + # subprocess.Popen keyword arguments + self.kwargs = {"env": env} + + if platform.system().lower() == "windows": + # Detach new process from currently running process on Windows + flags = ( + subprocess.CREATE_NEW_PROCESS_GROUP + | subprocess.DETACHED_PROCESS + ) + self.kwargs["creationflags"] = flags + + if not sys.stdout: + self.kwargs["stdout"] = subprocess.DEVNULL + self.kwargs["stderr"] = subprocess.DEVNULL + + self.prelaunch_hooks = None + self.postlaunch_hooks = None + + self.process = None + self._prelaunch_hooks_executed = False + + @property + def env(self): + if ( + "env" not in self.kwargs + or self.kwargs["env"] is None + ): + self.kwargs["env"] = {} + return self.kwargs["env"] + + @env.setter + def env(self, value): + if not isinstance(value, dict): + raise ValueError( + "'env' attribute expect 'dict' object. Got: {}".format( + str(type(value)) + ) + ) + self.kwargs["env"] = value + + @property + def modules_manager(self): + """ + Deprecated: + Use 'addons_manager' instead. + + """ + return self.addons_manager + + def _collect_addons_launch_hook_paths(self): + """Helper to collect application launch hooks from addons. + + Module have to have implemented 'get_launch_hook_paths' method which + can expect application as argument or nothing. + + Returns: + List[str]: Paths to launch hook directories. 
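+
+        Example:
+            An addon may implement the method like this (illustrative
+            sketch; 'MY_ADDON_ROOT' is a hypothetical addon constant):
+            ```
+            # 'MY_ADDON_ROOT' is a hypothetical addon constant
+            def get_launch_hook_paths(self, application):
+                return [os.path.join(MY_ADDON_ROOT, "hooks")]
+            ```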
+ """ + + expected_types = (list, tuple, set) + + output = [] + for module in self.addons_manager.get_enabled_addons(): + # Skip module if does not have implemented 'get_launch_hook_paths' + func = getattr(module, "get_launch_hook_paths", None) + if func is None: + continue + + func = module.get_launch_hook_paths + if hasattr(inspect, "signature"): + sig = inspect.signature(func) + expect_args = len(sig.parameters) > 0 + else: + expect_args = len(inspect.getargspec(func)[0]) > 0 + + # Pass application argument if method expect it. + try: + if expect_args: + hook_paths = func(self.application) + else: + hook_paths = func() + except Exception: + self.log.warning( + "Failed to call 'get_launch_hook_paths'", + exc_info=True + ) + continue + + if not hook_paths: + continue + + # Convert string to list + if isinstance(hook_paths, six.string_types): + hook_paths = [hook_paths] + + # Skip invalid types + if not isinstance(hook_paths, expected_types): + self.log.warning(( + "Result of `get_launch_hook_paths`" + " has invalid type {}. Expected {}" + ).format(type(hook_paths), expected_types)) + continue + + output.extend(hook_paths) + return output + + def paths_to_launch_hooks(self): + """Directory paths where to look for launch hooks.""" + # This method has potential to be part of application manager (maybe). + paths = [] + + # TODO load additional studio paths from settings + global_hooks_dir = os.path.join(AYON_CORE_ROOT, "hooks") + + hooks_dirs = [ + global_hooks_dir + ] + if self.host_name: + # If host requires launch hooks and is module then launch hooks + # should be collected using 'collect_launch_hook_paths' + # - module have to implement 'get_launch_hook_paths' + host_module = self.addons_manager.get_host_addon(self.host_name) + if not host_module: + hooks_dirs.append(os.path.join( + AYON_CORE_ROOT, "hosts", self.host_name, "hooks" + )) + + for path in hooks_dirs: + if ( + os.path.exists(path) + and os.path.isdir(path) + and path not in paths + ): + paths.append(path) + + # Load modules paths + paths.extend(self._collect_addons_launch_hook_paths()) + + return paths + + def discover_launch_hooks(self, force=False): + """Load and prepare launch hooks.""" + if ( + self.prelaunch_hooks is not None + or self.postlaunch_hooks is not None + ): + if not force: + self.log.info("Launch hooks were already discovered.") + return + + self.prelaunch_hooks.clear() + self.postlaunch_hooks.clear() + + self.log.debug("Discovery of launch hooks started.") + + paths = self.paths_to_launch_hooks() + self.log.debug("Paths searched for launch hooks:\n{}".format( + "\n".join("- {}".format(path) for path in paths) + )) + + all_classes = { + "pre": [], + "post": [] + } + for path in paths: + if not os.path.exists(path): + self.log.info( + "Path to launch hooks does not exist: \"{}\"".format(path) + ) + continue + + modules, _crashed = modules_from_path(path) + for _filepath, module in modules: + all_classes["pre"].extend( + classes_from_module(PreLaunchHook, module) + ) + all_classes["post"].extend( + classes_from_module(PostLaunchHook, module) + ) + + for launch_type, classes in all_classes.items(): + hooks_with_order = [] + hooks_without_order = [] + for klass in classes: + try: + hook = klass(self) + if not hook.is_valid: + self.log.debug( + "Skipped hook invalid for current launch context: " + "{}".format(klass.__name__) + ) + continue + + if inspect.isabstract(hook): + self.log.debug("Skipped abstract hook: {}".format( + klass.__name__ + )) + continue + + # Separate hooks by pre/post class + if hook.order is 
None: + hooks_without_order.append(hook) + else: + hooks_with_order.append(hook) + + except Exception: + self.log.warning( + "Initialization of hook failed: " + "{}".format(klass.__name__), + exc_info=True + ) + + # Sort hooks with order by order + ordered_hooks = list(sorted( + hooks_with_order, key=lambda obj: obj.order + )) + # Extend ordered hooks with hooks without defined order + ordered_hooks.extend(hooks_without_order) + + if launch_type == "pre": + self.prelaunch_hooks = ordered_hooks + else: + self.postlaunch_hooks = ordered_hooks + + self.log.debug("Found {} prelaunch and {} postlaunch hooks.".format( + len(self.prelaunch_hooks), len(self.postlaunch_hooks) + )) + + @property + def app_name(self): + return self.application.name + + @property + def host_name(self): + return self.application.host_name + + @property + def app_group(self): + return self.application.group + + @property + def manager(self): + return self.application.manager + + def _run_process(self): + # Windows and MacOS have easier process start + low_platform = platform.system().lower() + if low_platform in ("windows", "darwin"): + return subprocess.Popen(self.launch_args, **self.kwargs) + + # Linux uses mid process + # - it is possible that the mid process executable is not + # available for this version of AYON in that case use standard + # launch + launch_args = get_linux_launcher_args() + if launch_args is None: + return subprocess.Popen(self.launch_args, **self.kwargs) + + # Prepare data that will be passed to midprocess + # - store arguments to a json and pass path to json as last argument + # - pass environments to set + app_env = self.kwargs.pop("env", {}) + json_data = { + "args": self.launch_args, + "env": app_env + } + if app_env: + # Filter environments of subprocess + self.kwargs["env"] = { + key: value + for key, value in os.environ.items() + if key in app_env + } + + # Create temp file + json_temp = tempfile.NamedTemporaryFile( + mode="w", prefix="op_app_args", suffix=".json", delete=False + ) + json_temp.close() + json_temp_filpath = json_temp.name + with open(json_temp_filpath, "w") as stream: + json.dump(json_data, stream) + + launch_args.append(json_temp_filpath) + + # Create mid-process which will launch application + process = subprocess.Popen(launch_args, **self.kwargs) + # Wait until the process finishes + # - This is important! The process would stay in "open" state. + process.wait() + # Remove the temp file + os.remove(json_temp_filpath) + # Return process which is already terminated + return process + + def run_prelaunch_hooks(self): + """Run prelaunch hooks. + + This method will be executed only once, any future calls will skip + the processing. + """ + + if self._prelaunch_hooks_executed: + self.log.warning("Prelaunch hooks were already executed.") + return + # Discover launch hooks + self.discover_launch_hooks() + + # Execute prelaunch hooks + for prelaunch_hook in self.prelaunch_hooks: + self.log.debug("Executing prelaunch hook: {}".format( + str(prelaunch_hook.__class__.__name__) + )) + prelaunch_hook.execute() + self._prelaunch_hooks_executed = True + + def launch(self): + """Collect data for new process and then create it. + + This method must not be executed more than once. + + Returns: + subprocess.Popen: Created process as Popen object. + """ + if self.process is not None: + self.log.warning("Application was already launched.") + return + + if not self._prelaunch_hooks_executed: + self.run_prelaunch_hooks() + + self.log.debug("All prelaunch hook executed. 
Starting new process.") + + # Prepare subprocess args + args_len_str = "" + if isinstance(self.launch_args, str): + args = self.launch_args + else: + args = self.clear_launch_args(self.launch_args) + args_len_str = " ({})".format(len(args)) + self.log.info( + "Launching \"{}\" with args{}: {}".format( + self.application.full_name, args_len_str, args + ) + ) + self.launch_args = args + + # Run process + self.process = self._run_process() + + # Process post launch hooks + for postlaunch_hook in self.postlaunch_hooks: + self.log.debug("Executing postlaunch hook: {}".format( + str(postlaunch_hook.__class__.__name__) + )) + + # TODO how to handle errors? + # - store to variable to let them accessible? + try: + postlaunch_hook.execute() + + except Exception: + self.log.warning( + "After launch procedures were not successful.", + exc_info=True + ) + + self.log.debug("Launch of {} finished.".format( + self.application.full_name + )) + + return self.process + + @staticmethod + def clear_launch_args(args): + """Collect launch arguments to final order. + + Launch argument should be list that may contain another lists this + function will upack inner lists and keep ordering. + + ``` + # source + [ [ arg1, [ arg2, arg3 ] ], arg4, [arg5, arg6]] + # result + [ arg1, arg2, arg3, arg4, arg5, arg6] + + Args: + args (list): Source arguments in list may contain inner lists. + + Return: + list: Unpacked arguments. + """ + if isinstance(args, str): + return args + all_cleared = False + while not all_cleared: + all_cleared = True + new_args = [] + for arg in args: + if isinstance(arg, (list, tuple, set)): + all_cleared = False + for _arg in arg: + new_args.append(_arg) + else: + new_args.append(arg) + args = new_args + + return args + diff --git a/client/ayon_core/addons/applications/ayon_applications/plugins/publish/collect_app_name.py b/client/ayon_core/addons/applications/ayon_applications/plugins/publish/collect_app_name.py new file mode 100644 index 0000000000..f54a551cda --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/plugins/publish/collect_app_name.py @@ -0,0 +1,48 @@ +""" +Run after global plugin 'CollectHostName' in ayon_core. 
+ +Requires: + None + +Provides: + context -> hostName (str) + context -> appName (str) + context -> appLabel (str) +""" +import os +import pyblish.api + +from ayon_applications import ApplicationManager + + +class CollectAppName(pyblish.api.ContextPlugin): + """Collect avalon host name to context.""" + + label = "Collect App Name" + order = pyblish.api.CollectorOrder - 0.499999 + + def process(self, context): + host_name = context.data.get("hostName") + app_name = context.data.get("appName") + app_label = context.data.get("appLabel") + # Don't override value if is already set + if host_name and app_name and app_label: + return + + # Use AYON_APP_NAME to get full app name + if not app_name: + app_name = os.environ.get("AYON_APP_NAME") + + # Fill missing values based on app full name + if (not host_name or not app_label) and app_name: + app_manager = ApplicationManager() + app = app_manager.applications.get(app_name) + if app: + if not host_name: + host_name = app.host_name + if not app_label: + app_label = app.full_label + + context.data["hostName"] = host_name + context.data["appName"] = app_name + context.data["appLabel"] = app_label diff --git a/client/ayon_core/addons/applications/ayon_applications/utils.py b/client/ayon_core/addons/applications/ayon_applications/utils.py new file mode 100644 index 0000000000..234fa6c683 --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/utils.py @@ -0,0 +1,609 @@ +import os +import copy +import json +import platform +import collections + +import six +import acre + +from ayon_core import AYON_CORE_ROOT +from ayon_core.settings import get_project_settings +from ayon_core.lib import Logger, get_ayon_username +from ayon_core.addon import AddonsManager +from ayon_core.pipeline import HOST_WORKFILE_EXTENSIONS +from ayon_core.pipeline.template_data import get_template_data +from ayon_core.pipeline.workfile import ( + get_workfile_template_key, + get_workdir_with_workdir_data, + get_last_workfile, + should_use_last_workfile_on_launch, + should_open_workfiles_tool_on_launch, +) + +from .constants import PLATFORM_NAMES, DEFAULT_ENV_SUBGROUP +from .exceptions import MissingRequiredKey, ApplicationLaunchFailed +from .manager import ApplicationManager + + +def parse_environments(env_data, env_group=None, platform_name=None): + """Parse environment values from settings byt group and platform. + + Data may contain up to 2 hierarchical levels of dictionaries. At the end + of the last level must be string or list. List is joined using platform + specific joiner (';' for windows and ':' for linux and mac). + + Hierarchical levels can contain keys for subgroups and platform name. + Platform specific values must be always last level of dictionary. Platform + names are "windows" (MS Windows), "linux" (any linux distribution) and + "darwin" (any MacOS distribution). + + Subgroups are helpers added mainly for standard and on farm usage. Farm + may require different environments for e.g. licence related values or + plugins. Default subgroup is "standard". 
+ + Examples: + ``` + { + # Unchanged value + "ENV_KEY1": "value", + # Empty values are kept (unset environment variable) + "ENV_KEY2": "", + + # Join list values with ':' or ';' + "ENV_KEY3": ["value1", "value2"], + + # Environment groups + "ENV_KEY4": { + "standard": "DEMO_SERVER_URL", + "farm": "LICENCE_SERVER_URL" + }, + + # Platform specific (and only for windows and mac) + "ENV_KEY5": { + "windows": "windows value", + "darwin": ["value 1", "value 2"] + }, + + # Environment groups and platform combination + "ENV_KEY6": { + "farm": "FARM_VALUE", + "standard": { + "windows": ["value1", "value2"], + "linux": "value1", + "darwin": "" + } + } + } + ``` + """ + output = {} + if not env_data: + return output + + if not env_group: + env_group = DEFAULT_ENV_SUBGROUP + + if not platform_name: + platform_name = platform.system().lower() + + for key, value in env_data.items(): + if isinstance(value, dict): + # Look if any key is platform key + # - expect that represents environment group if does not contain + # platform keys + if not PLATFORM_NAMES.intersection(set(value.keys())): + # Skip the key if group is not available + if env_group not in value: + continue + value = value[env_group] + + # Check again if value is dictionary + # - this time there should be only platform keys + if isinstance(value, dict): + value = value.get(platform_name) + + # Check if value is list and join it's values + # QUESTION Should empty values be skipped? + if isinstance(value, (list, tuple)): + value = os.pathsep.join(value) + + # Set key to output if value is string + if isinstance(value, six.string_types): + output[key] = value + return output + + +class EnvironmentPrepData(dict): + """Helper dictionary for storin temp data during environment prep. + + Args: + data (dict): Data must contain required keys. + """ + required_keys = ( + "project_entity", "folder_entity", "task_entity", "app", "anatomy" + ) + + def __init__(self, data): + for key in self.required_keys: + if key not in data: + raise MissingRequiredKey(key) + + if not data.get("log"): + data["log"] = Logger.get_logger("EnvironmentPrepData") + + if data.get("env") is None: + data["env"] = os.environ.copy() + + project_name = data["project_entity"]["name"] + if "project_settings" not in data: + data["project_settings"] = get_project_settings(project_name) + + super(EnvironmentPrepData, self).__init__(data) + + +def get_app_environments_for_context( + project_name, + folder_path, + task_name, + app_name, + env_group=None, + launch_type=None, + env=None, + addons_manager=None +): + """Prepare environment variables by context. + Args: + project_name (str): Name of project. + folder_path (str): Folder path. + task_name (str): Name of task. + app_name (str): Name of application that is launched and can be found + by ApplicationManager. + env_group (Optional[str]): Name of environment group. If not passed + default group is used. + launch_type (Optional[str]): Type for which prelaunch hooks are + executed. + env (Optional[dict[str, str]]): Initial environment variables. + `os.environ` is used when not passed. + addons_manager (Optional[AddonsManager]): Initialized modules + manager. + + Returns: + dict: Environments for passed context and application. 
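+
+    Example:
+        Illustrative call with hypothetical context values.
+        ```
+        # Context values below are only illustrative
+        env = get_app_environments_for_context(
+            "my_project", "/assets/hero", "modeling", "maya/2024"
+        )
+        ```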
+ """ + + # Prepare app object which can be obtained only from ApplicationManager + app_manager = ApplicationManager() + context = app_manager.create_launch_context( + app_name, + project_name=project_name, + folder_path=folder_path, + task_name=task_name, + env_group=env_group, + launch_type=launch_type, + env=env, + addons_manager=addons_manager, + modules_manager=addons_manager, + ) + context.run_prelaunch_hooks() + return context.env + + +def _merge_env(env, current_env): + """Modified function(merge) from acre module.""" + result = current_env.copy() + for key, value in env.items(): + # Keep missing keys by not filling `missing` kwarg + value = acre.lib.partial_format(value, data=current_env) + result[key] = value + return result + + +def _add_python_version_paths(app, env, logger, addons_manager): + """Add vendor packages specific for a Python version.""" + + for addon in addons_manager.get_enabled_addons(): + addon.modify_application_launch_arguments(app, env) + + # Skip adding if host name is not set + if not app.host_name: + return + + # Add Python 2/3 modules + python_vendor_dir = os.path.join( + AYON_CORE_ROOT, + "vendor", + "python" + ) + if app.use_python_2: + pythonpath = os.path.join(python_vendor_dir, "python_2") + else: + pythonpath = os.path.join(python_vendor_dir, "python_3") + + if not os.path.exists(pythonpath): + return + + logger.debug("Adding Python version specific paths to PYTHONPATH") + python_paths = [pythonpath] + + # Load PYTHONPATH from current launch context + python_path = env.get("PYTHONPATH") + if python_path: + python_paths.append(python_path) + + # Set new PYTHONPATH to launch context environments + env["PYTHONPATH"] = os.pathsep.join(python_paths) + + +def prepare_app_environments( + data, env_group=None, implementation_envs=True, addons_manager=None +): + """Modify launch environments based on launched app and context. + + Args: + data (EnvironmentPrepData): Dictionary where result and intermediate + result will be stored. + + """ + app = data["app"] + log = data["log"] + source_env = data["env"].copy() + + if addons_manager is None: + addons_manager = AddonsManager() + + _add_python_version_paths(app, source_env, log, addons_manager) + + # Use environments from local settings + filtered_local_envs = {} + # NOTE Overrides for environment variables are not implemented in AYON. 
+ # project_settings = data["project_settings"] + # whitelist_envs = project_settings["general"].get("local_env_white_list") + # if whitelist_envs: + # local_settings = get_local_settings() + # local_envs = local_settings.get("environments") or {} + # filtered_local_envs = { + # key: value + # for key, value in local_envs.items() + # if key in whitelist_envs + # } + + # Apply local environment variables for already existing values + for key, value in filtered_local_envs.items(): + if key in source_env: + source_env[key] = value + + # `app_and_tool_labels` has debug purpose + app_and_tool_labels = [app.full_name] + # Environments for application + environments = [ + app.group.environment, + app.environment + ] + + folder_entity = data.get("folder_entity") + # Add tools environments + groups_by_name = {} + tool_by_group_name = collections.defaultdict(dict) + if folder_entity: + # Make sure each tool group can be added only once + for key in folder_entity["attrib"].get("tools") or []: + tool = app.manager.tools.get(key) + if not tool or not tool.is_valid_for_app(app): + continue + groups_by_name[tool.group.name] = tool.group + tool_by_group_name[tool.group.name][tool.name] = tool + + for group_name in sorted(groups_by_name.keys()): + group = groups_by_name[group_name] + environments.append(group.environment) + for tool_name in sorted(tool_by_group_name[group_name].keys()): + tool = tool_by_group_name[group_name][tool_name] + environments.append(tool.environment) + app_and_tool_labels.append(tool.full_name) + + log.debug( + "Will add environments for apps and tools: {}".format( + ", ".join(app_and_tool_labels) + ) + ) + + env_values = {} + for _env_values in environments: + if not _env_values: + continue + + # Choose right platform + tool_env = parse_environments(_env_values, env_group) + + # Apply local environment variables + # - must happen between all values because they may be used during + # merge + for key, value in filtered_local_envs.items(): + if key in tool_env: + tool_env[key] = value + + # Merge dictionaries + env_values = _merge_env(tool_env, env_values) + + merged_env = _merge_env(env_values, source_env) + + loaded_env = acre.compute(merged_env, cleanup=False) + + final_env = None + # Add host specific environments + if app.host_name and implementation_envs: + host_addon = addons_manager.get_host_addon(app.host_name) + add_implementation_envs = None + if host_addon: + add_implementation_envs = getattr( + host_addon, "add_implementation_envs", None + ) + if add_implementation_envs: + # Function may only modify passed dict without returning value + final_env = add_implementation_envs(loaded_env, app) + + if final_env is None: + final_env = loaded_env + + keys_to_remove = set(source_env.keys()) - set(final_env.keys()) + + # Update env + data["env"].update(final_env) + for key in keys_to_remove: + data["env"].pop(key, None) + + +def apply_project_environments_value( + project_name, env, project_settings=None, env_group=None +): + """Apply project specific environments on passed environments. + + The environments are applied on passed `env` argument value so it is not + required to apply changes back. + + Args: + project_name (str): Name of project for which environments should be + received. + env (dict): Environment values on which project specific environments + will be applied. + project_settings (dict): Project settings for passed project name. + Optional if project settings are already prepared. + + Returns: + dict: Passed env values with applied project environments. 
+ + Raises: + KeyError: If project settings do not contain keys for project specific + environments. + + """ + if project_settings is None: + project_settings = get_project_settings(project_name) + + env_value = project_settings["core"]["project_environments"] + if env_value: + env_value = json.loads(env_value) + parsed_value = parse_environments(env_value, env_group) + env.update(acre.compute( + _merge_env(parsed_value, env), + cleanup=False + )) + return env + + +def prepare_context_environments(data, env_group=None, addons_manager=None): + """Modify launch environments with context data for launched host. + + Args: + data (EnvironmentPrepData): Dictionary where result and intermediate + result will be stored. + + """ + # Context environments + log = data["log"] + + project_entity = data["project_entity"] + folder_entity = data["folder_entity"] + task_entity = data["task_entity"] + if not project_entity: + log.info( + "Skipping context environments preparation." + " Launch context does not contain required data." + ) + return + + # Load project specific environments + project_name = project_entity["name"] + project_settings = get_project_settings(project_name) + data["project_settings"] = project_settings + + app = data["app"] + context_env = { + "AYON_PROJECT_NAME": project_entity["name"], + "AYON_APP_NAME": app.full_name + } + if folder_entity: + folder_path = folder_entity["path"] + context_env["AYON_FOLDER_PATH"] = folder_path + + if task_entity: + context_env["AYON_TASK_NAME"] = task_entity["name"] + + log.debug( + "Context environments set:\n{}".format( + json.dumps(context_env, indent=4) + ) + ) + data["env"].update(context_env) + + # Apply project specific environments on current env value + # - apply them once the context environments are set + apply_project_environments_value( + project_name, data["env"], project_settings, env_group + ) + + if not app.is_host: + return + + data["env"]["AYON_HOST_NAME"] = app.host_name + + if not folder_entity or not task_entity: + # QUESTION replace with log.info and skip workfile discovery? + # - technically it should be possible to launch host without context + raise ApplicationLaunchFailed( + "Host launch require folder and task context." + ) + + workdir_data = get_template_data( + project_entity, + folder_entity, + task_entity, + app.host_name, + project_settings + ) + data["workdir_data"] = workdir_data + + anatomy = data["anatomy"] + + task_type = workdir_data["task"]["type"] + # Temp solution how to pass task type to `_prepare_last_workfile` + data["task_type"] = task_type + + try: + workdir = get_workdir_with_workdir_data( + workdir_data, + anatomy.project_name, + anatomy, + project_settings=project_settings + ) + + except Exception as exc: + raise ApplicationLaunchFailed( + "Error in anatomy.format: {}".format(str(exc)) + ) + + if not os.path.exists(workdir): + log.debug( + "Creating workdir folder: \"{}\"".format(workdir) + ) + try: + os.makedirs(workdir) + except Exception as exc: + raise ApplicationLaunchFailed( + "Couldn't create workdir because: {}".format(str(exc)) + ) + + data["env"]["AYON_WORKDIR"] = workdir + + _prepare_last_workfile(data, workdir, addons_manager) + + +def _prepare_last_workfile(data, workdir, addons_manager): + """last workfile workflow preparation. + + Function check if should care about last workfile workflow and tries + to find the last workfile. Both information are stored to `data` and + environments. + + Last workfile is filled always (with version 1) even if any workfile + exists yet. 
+ + Args: + data (EnvironmentPrepData): Dictionary where result and intermediate + result will be stored. + workdir (str): Path to folder where workfiles should be stored. + + """ + if not addons_manager: + addons_manager = AddonsManager() + + log = data["log"] + + _workdir_data = data.get("workdir_data") + if not _workdir_data: + log.info( + "Skipping last workfile preparation." + " Key `workdir_data` not filled." + ) + return + + app = data["app"] + workdir_data = copy.deepcopy(_workdir_data) + project_name = data["project_name"] + task_name = data["task_name"] + task_type = data["task_type"] + + start_last_workfile = data.get("start_last_workfile") + if start_last_workfile is None: + start_last_workfile = should_use_last_workfile_on_launch( + project_name, app.host_name, task_name, task_type + ) + else: + log.info("Opening of last workfile was disabled by user") + + data["start_last_workfile"] = start_last_workfile + + workfile_startup = should_open_workfiles_tool_on_launch( + project_name, app.host_name, task_name, task_type + ) + data["workfile_startup"] = workfile_startup + + # Store boolean as "0"(False) or "1"(True) + data["env"]["AVALON_OPEN_LAST_WORKFILE"] = ( + str(int(bool(start_last_workfile))) + ) + data["env"]["AYON_WORKFILE_TOOL_ON_START"] = ( + str(int(bool(workfile_startup))) + ) + + _sub_msg = "" if start_last_workfile else " not" + log.debug( + "Last workfile should{} be opened on start.".format(_sub_msg) + ) + + # Last workfile path + last_workfile_path = data.get("last_workfile_path") or "" + if not last_workfile_path: + host_addon = addons_manager.get_host_addon(app.host_name) + if host_addon: + extensions = host_addon.get_workfile_extensions() + else: + extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) + + if extensions: + anatomy = data["anatomy"] + project_settings = data["project_settings"] + task_type = workdir_data["task"]["type"] + template_key = get_workfile_template_key( + project_name, + task_type, + app.host_name, + project_settings=project_settings + ) + # Find last workfile + file_template = anatomy.get_template_item( + "work", template_key, "file" + ).template + + workdir_data.update({ + "version": 1, + "user": get_ayon_username(), + "ext": extensions[0] + }) + + last_workfile_path = get_last_workfile( + workdir, file_template, workdir_data, extensions, True + ) + + if os.path.exists(last_workfile_path): + log.debug(( + "Workfiles for launch context does not exists" + " yet but path will be set." 
+ )) + log.debug( + "Setting last workfile path: {}".format(last_workfile_path) + ) + + data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path + data["last_workfile_path"] = last_workfile_path diff --git a/client/ayon_core/cli.py b/client/ayon_core/cli.py index 2759b4fccf..80fbb57340 100644 --- a/client/ayon_core/cli.py +++ b/client/ayon_core/cli.py @@ -4,6 +4,7 @@ import os import sys import code import traceback +from pathlib import Path import click import acre @@ -11,7 +12,7 @@ import acre from ayon_core import AYON_CORE_ROOT from ayon_core.addon import AddonsManager from ayon_core.settings import get_general_environments -from ayon_core.lib import initialize_ayon_connection +from ayon_core.lib import initialize_ayon_connection, is_running_from_build from .cli_commands import Commands @@ -81,7 +82,7 @@ main_cli.set_alias("addon", "module") @main_cli.command() @click.argument("output_json_path") @click.option("--project", help="Project name", default=None) -@click.option("--asset", help="Asset name", default=None) +@click.option("--asset", help="Folder path", default=None) @click.option("--task", help="Task name", default=None) @click.option("--app", help="Application name", default=None) @click.option( @@ -96,6 +97,10 @@ def extractenvironments(output_json_path, project, asset, task, app, envgroup): environments will be extracted. Context options are "project", "asset", "task", "app" + + Deprecated: + This function is deprecated and will be removed in future. Please use + 'addon applications extractenvironments ...' instead. """ Commands.extractenvironments( output_json_path, project, asset, task, app, envgroup @@ -127,7 +132,7 @@ def publish_report_viewer(): @main_cli.command() @click.argument("output_path") @click.option("--project", help="Define project context") -@click.option("--asset", help="Define asset in project (project must be set)") +@click.option("--folder", help="Define folder in project (project must be set)") @click.option( "--strict", is_flag=True, @@ -136,18 +141,18 @@ def publish_report_viewer(): def contextselection( output_path, project, - asset, + folder, strict ): """Show Qt dialog to select context. - Context is project name, asset name and task name. The result is stored + Context is project name, folder path and task name. The result is stored into json file which path is passed in first argument. """ Commands.contextselection( output_path, project, - asset, + folder, strict ) @@ -163,16 +168,27 @@ def run(script): if not script: print("Error: missing path to script file.") + return + + # Remove first argument if it is the same as AYON executable + # - Forward compatibility with future AYON versions. + # - Current AYON launcher keeps the arguments with first argument but + # future versions might remove it. + first_arg = sys.argv[0] + if is_running_from_build(): + comp_path = os.path.join(os.environ["AYON_ROOT"], "start.py") else: + comp_path = os.getenv("AYON_EXECUTABLE") + # Compare paths and remove first argument if it is the same as AYON + if Path(first_arg).resolve() == Path(comp_path).resolve(): + sys.argv.pop(0) - args = sys.argv - args.remove("run") - args.remove(script) - sys.argv = args + # Remove 'run' command from sys.argv + sys.argv.remove("run") - args_string = " ".join(args[1:]) - print(f"... running: {script} {args_string}") - runpy.run_path(script, run_name="__main__", ) + args_string = " ".join(sys.argv[1:]) + print(f"... 
running: {script} {args_string}") + runpy.run_path(script, run_name="__main__") @main_cli.command() diff --git a/client/ayon_core/cli_commands.py b/client/ayon_core/cli_commands.py index fa90571462..0fb18be687 100644 --- a/client/ayon_core/cli_commands.py +++ b/client/ayon_core/cli_commands.py @@ -2,7 +2,7 @@ """Implementation of AYON commands.""" import os import sys -import json +import warnings class Commands: @@ -57,10 +57,7 @@ class Commands: """ from ayon_core.lib import Logger - from ayon_core.lib.applications import ( - get_app_environments_for_context, - LaunchTypes, - ) + from ayon_core.addon import AddonsManager from ayon_core.pipeline import ( install_ayon_plugins, @@ -68,7 +65,6 @@ class Commands: ) # Register target and host - import pyblish.api import pyblish.util if not isinstance(path, str): @@ -99,15 +95,13 @@ class Commands: for plugin_path in publish_paths: pyblish.api.register_plugin_path(plugin_path) - app_full_name = os.getenv("AYON_APP_NAME") - if app_full_name: + applications_addon = manager.get_enabled_addon("applications") + if applications_addon is not None: context = get_global_context() - env = get_app_environments_for_context( + env = applications_addon.get_farm_publish_environment_variables( context["project_name"], context["folder_path"], context["task_name"], - app_full_name, - launch_type=LaunchTypes.farm_publish, ) os.environ.update(env) @@ -149,36 +143,36 @@ class Commands: log.info("Publish finished.") @staticmethod - def extractenvironments(output_json_path, project, asset, task, app, - env_group): + def extractenvironments( + output_json_path, project, asset, task, app, env_group + ): """Produces json file with environment based on project and app. Called by Deadline plugin to propagate environment into render jobs. """ - from ayon_core.lib.applications import ( - get_app_environments_for_context, - LaunchTypes, + from ayon_core.addon import AddonsManager + + warnings.warn( + ( + "Command 'extractenvironments' is deprecated and will be" + " removed in future. Please use " + "'addon applications extractenvironments ...' instead." + ), + DeprecationWarning ) - if all((project, asset, task, app)): - env = get_app_environments_for_context( - project, - asset, - task, - app, - env_group=env_group, - launch_type=LaunchTypes.farm_render + addons_manager = AddonsManager() + applications_addon = addons_manager.get_enabled_addon("applications") + if applications_addon is None: + raise RuntimeError( + "Applications addon is not available or enabled." 
) - else: - env = os.environ.copy() - output_dir = os.path.dirname(output_json_path) - if not os.path.exists(output_dir): - os.makedirs(output_dir) - - with open(output_json_path, "w") as file_stream: - json.dump(env, file_stream, indent=4) + # Please ignore the fact this is using private method + applications_addon._cli_extract_environments( + output_json_path, project, asset, task, app, env_group + ) @staticmethod def contextselection(output_path, project_name, folder_path, strict): diff --git a/client/ayon_core/hooks/pre_add_last_workfile_arg.py b/client/ayon_core/hooks/pre_add_last_workfile_arg.py index d11bb106d6..74964e0df9 100644 --- a/client/ayon_core/hooks/pre_add_last_workfile_arg.py +++ b/client/ayon_core/hooks/pre_add_last_workfile_arg.py @@ -1,6 +1,6 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class AddLastWorkfileToLaunchArgs(PreLaunchHook): diff --git a/client/ayon_core/hooks/pre_copy_template_workfile.py b/client/ayon_core/hooks/pre_copy_template_workfile.py index 096ad7dd7e..c884116578 100644 --- a/client/ayon_core/hooks/pre_copy_template_workfile.py +++ b/client/ayon_core/hooks/pre_copy_template_workfile.py @@ -1,7 +1,7 @@ import os import shutil from ayon_core.settings import get_project_settings -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.pipeline.workfile import ( get_custom_workfile_template, get_custom_workfile_template_by_string_context diff --git a/client/ayon_core/hooks/pre_create_extra_workdir_folders.py b/client/ayon_core/hooks/pre_create_extra_workdir_folders.py index 72c6bf2f68..8cbdaa338e 100644 --- a/client/ayon_core/hooks/pre_create_extra_workdir_folders.py +++ b/client/ayon_core/hooks/pre_create_extra_workdir_folders.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.pipeline.workfile import create_workdir_extra_folders diff --git a/client/ayon_core/hooks/pre_global_host_data.py b/client/ayon_core/hooks/pre_global_host_data.py index 27e66450ab..e93b512742 100644 --- a/client/ayon_core/hooks/pre_global_host_data.py +++ b/client/ayon_core/hooks/pre_global_host_data.py @@ -1,7 +1,7 @@ from ayon_api import get_project, get_folder_by_path, get_task_by_name -from ayon_core.lib.applications import ( - PreLaunchHook, +from ayon_applications import PreLaunchHook +from ayon_applications.utils import ( EnvironmentPrepData, prepare_app_environments, prepare_context_environments diff --git a/client/ayon_core/hooks/pre_mac_launch.py b/client/ayon_core/hooks/pre_mac_launch.py index 34680155f1..b234a20310 100644 --- a/client/ayon_core/hooks/pre_mac_launch.py +++ b/client/ayon_core/hooks/pre_mac_launch.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class LaunchWithTerminal(PreLaunchHook): diff --git a/client/ayon_core/hooks/pre_new_console_apps.py b/client/ayon_core/hooks/pre_new_console_apps.py index c81b924573..9777d37900 100644 --- a/client/ayon_core/hooks/pre_new_console_apps.py +++ b/client/ayon_core/hooks/pre_new_console_apps.py @@ -1,5 +1,5 @@ import subprocess -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class LaunchNewConsoleApps(PreLaunchHook): diff --git 
a/client/ayon_core/hooks/pre_ocio_hook.py b/client/ayon_core/hooks/pre_ocio_hook.py index e135a5bb12..0817afec71 100644 --- a/client/ayon_core/hooks/pre_ocio_hook.py +++ b/client/ayon_core/hooks/pre_ocio_hook.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook +from ayon_applications import PreLaunchHook from ayon_core.pipeline.colorspace import get_imageio_config from ayon_core.pipeline.template_data import get_template_data_with_names diff --git a/client/ayon_core/host/dirmap.py b/client/ayon_core/host/dirmap.py index effafb6261..2e24877d28 100644 --- a/client/ayon_core/host/dirmap.py +++ b/client/ayon_core/host/dirmap.py @@ -36,23 +36,23 @@ class HostDirmap(object): host_name, project_name, project_settings=None, - sync_module=None + sitesync_addon=None ): self.host_name = host_name self.project_name = project_name self._project_settings = project_settings - self._sync_module = sync_module + self._sitesync_addon = sitesync_addon # to limit reinit of Modules - self._sync_module_discovered = sync_module is not None + self._sitesync_addon_discovered = sitesync_addon is not None self._log = None @property - def sync_module(self): - if not self._sync_module_discovered: - self._sync_module_discovered = True + def sitesync_addon(self): + if not self._sitesync_addon_discovered: + self._sitesync_addon_discovered = True manager = AddonsManager() - self._sync_module = manager.get("sync_server") - return self._sync_module + self._sitesync_addon = manager.get("sitesync") + return self._sitesync_addon @property def project_settings(self): @@ -158,25 +158,25 @@ class HostDirmap(object): """ project_name = self.project_name - sync_module = self.sync_module + sitesync_addon = self.sitesync_addon mapping = {} if ( - sync_module is None - or not sync_module.enabled - or project_name not in sync_module.get_enabled_projects() + sitesync_addon is None + or not sitesync_addon.enabled + or project_name not in sitesync_addon.get_enabled_projects() ): return mapping - active_site = sync_module.get_local_normalized_site( - sync_module.get_active_site(project_name)) - remote_site = sync_module.get_local_normalized_site( - sync_module.get_remote_site(project_name)) + active_site = sitesync_addon.get_local_normalized_site( + sitesync_addon.get_active_site(project_name)) + remote_site = sitesync_addon.get_local_normalized_site( + sitesync_addon.get_remote_site(project_name)) self.log.debug( "active {} - remote {}".format(active_site, remote_site) ) if active_site == "local" and active_site != remote_site: - sync_settings = sync_module.get_sync_project_setting( + sync_settings = sitesync_addon.get_sync_project_setting( project_name, exclude_locals=False, cached=False) @@ -194,7 +194,7 @@ class HostDirmap(object): self.log.debug("remote overrides {}".format(remote_overrides)) current_platform = platform.system().lower() - remote_provider = sync_module.get_provider_for_site( + remote_provider = sitesync_addon.get_provider_for_site( project_name, remote_site ) # dirmap has sense only with regular disk provider, in the workfile diff --git a/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py b/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py index 979d9ff3e5..a37481566e 100644 --- a/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py +++ b/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py @@ -6,10 +6,7 @@ from ayon_core.lib import ( get_ayon_launcher_args, is_using_ayon_console, ) -from ayon_core.lib.applications import ( - PreLaunchHook, - LaunchTypes, -) 
+from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.aftereffects import get_launch_script_path diff --git a/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py b/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py index 00b297f998..9041ef7309 100644 --- a/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py +++ b/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py @@ -1,6 +1,6 @@ from pathlib import Path -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class AddPythonScriptToLaunchArgs(PreLaunchHook): diff --git a/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py b/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py index c80a1bd669..8f46eea0de 100644 --- a/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py +++ b/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py @@ -2,7 +2,7 @@ import os import re import subprocess from platform import system -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class InstallPySideToBlender(PreLaunchHook): diff --git a/client/ayon_core/hosts/blender/hooks/pre_windows_console.py b/client/ayon_core/hosts/blender/hooks/pre_windows_console.py index e3a8593cd9..47303a7af4 100644 --- a/client/ayon_core/hosts/blender/hooks/pre_windows_console.py +++ b/client/ayon_core/hosts/blender/hooks/pre_windows_console.py @@ -1,5 +1,5 @@ import subprocess -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class BlenderConsoleWindows(PreLaunchHook): diff --git a/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py b/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py index d94fff8f2b..8350c7b7c8 100644 --- a/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py +++ b/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py @@ -3,7 +3,7 @@ import shutil import winreg import subprocess from ayon_core.lib import get_ayon_launcher_args -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.celaction import CELACTION_ROOT_DIR diff --git a/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py b/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py index 1ff7ad7ccf..77a9435205 100644 --- a/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py +++ b/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py @@ -9,7 +9,7 @@ from ayon_core.lib import ( get_ayon_username, run_subprocess, ) -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts import flame as opflame diff --git a/client/ayon_core/hosts/fusion/api/lib.py b/client/ayon_core/hosts/fusion/api/lib.py index ba650cc73f..03a1eeeb65 100644 --- a/client/ayon_core/hosts/fusion/api/lib.py +++ b/client/ayon_core/hosts/fusion/api/lib.py @@ -7,7 +7,7 @@ from ayon_core.lib import Logger from ayon_core.pipeline import registered_host from ayon_core.pipeline.create import CreateContext -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity self = sys.modules[__name__] self._project = None @@ -57,7 +57,7 @@ def update_frame_range(start, end, comp=None, set_render_range=True, def 
set_current_context_framerange(folder_entity=None): """Set Comp's frame range based on current folder.""" if folder_entity is None: - folder_entity = get_current_project_folder( + folder_entity = get_current_folder_entity( fields={"attrib.frameStart", "attrib.frameEnd", "attrib.handleStart", @@ -76,7 +76,7 @@ def set_current_context_framerange(folder_entity=None): def set_current_context_fps(folder_entity=None): """Set Comp's frame rate (FPS) to based on current asset""" if folder_entity is None: - folder_entity = get_current_project_folder(fields={"attrib.fps"}) + folder_entity = get_current_folder_entity(fields={"attrib.fps"}) fps = float(folder_entity["attrib"].get("fps", 24.0)) comp = get_current_comp() @@ -88,7 +88,7 @@ def set_current_context_fps(folder_entity=None): def set_current_context_resolution(folder_entity=None): """Set Comp's resolution width x height default based on current folder""" if folder_entity is None: - folder_entity = get_current_project_folder( + folder_entity = get_current_folder_entity( fields={"attrib.resolutionWidth", "attrib.resolutionHeight"}) folder_attributes = folder_entity["attrib"] @@ -124,7 +124,7 @@ def validate_comp_prefs(comp=None, force_repair=False): "attrib.resolutionHeight", "attrib.pixelAspect", } - folder_entity = get_current_project_folder(fields=fields) + folder_entity = get_current_folder_entity(fields=fields) folder_path = folder_entity["path"] folder_attributes = folder_entity["attrib"] @@ -389,7 +389,7 @@ def prompt_reset_context(): return None options = dialog.get_values() - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() if options["frame_range"]: set_current_context_framerange(folder_entity) diff --git a/client/ayon_core/hosts/fusion/api/pipeline.py b/client/ayon_core/hosts/fusion/api/pipeline.py index 03773790e4..2d1073ec7d 100644 --- a/client/ayon_core/hosts/fusion/api/pipeline.py +++ b/client/ayon_core/hosts/fusion/api/pipeline.py @@ -43,7 +43,7 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") # Track whether the workfile tool is about to save -ABOUT_TO_SAVE = False +_about_to_save = False class FusionLogHandler(logging.Handler): @@ -176,15 +176,15 @@ def on_save(event): validate_comp_prefs(comp) # We are now starting the actual save directly - global ABOUT_TO_SAVE - ABOUT_TO_SAVE = False + global _about_to_save + _about_to_save = False def on_task_changed(): - global ABOUT_TO_SAVE - print(f"Task changed: {ABOUT_TO_SAVE}") + global _about_to_save + print(f"Task changed: {_about_to_save}") # TODO: Only do this if not headless - if ABOUT_TO_SAVE: + if _about_to_save: # Let's prompt the user to update the context settings or not prompt_reset_context() @@ -228,7 +228,7 @@ def before_workfile_save(event): # have been shut down, and restarted - which will restart it to the # environment Fusion started with; not necessarily where the artist # is currently working. - # The `ABOUT_TO_SAVE` var is used to detect context changes when + # The `_about_to_save` var is used to detect context changes when # saving into another asset. If we keep it False it will be ignored # as context change. 
As such, before we change tasks we will only # consider it the current filepath is within the currently known @@ -239,8 +239,8 @@ def before_workfile_save(event): filepath = comp.GetAttrs()["COMPS_FileName"] workdir = os.environ.get("AYON_WORKDIR") if Path(workdir) in Path(filepath).parents: - global ABOUT_TO_SAVE - ABOUT_TO_SAVE = True + global _about_to_save + _about_to_save = True def ls(): diff --git a/client/ayon_core/hosts/fusion/api/plugin.py b/client/ayon_core/hosts/fusion/api/plugin.py index 492841f967..efe8269120 100644 --- a/client/ayon_core/hosts/fusion/api/plugin.py +++ b/client/ayon_core/hosts/fusion/api/plugin.py @@ -16,6 +16,12 @@ from ayon_core.pipeline import ( AVALON_INSTANCE_ID, AYON_INSTANCE_ID, ) +from ayon_core.pipeline.workfile import get_workdir +from ayon_api import ( + get_project, + get_folder_by_path, + get_task_by_name +) class GenericCreateSaver(Creator): @@ -125,6 +131,8 @@ class GenericCreateSaver(Creator): product_name = data["productName"] if ( original_product_name != product_name + or tool.GetData("openpype.task") != data["task"] + or tool.GetData("openpype.folderPath") != data["folderPath"] or original_format != data["creator_attributes"]["image_format"] ): self._configure_saver_tool(data, tool, product_name) @@ -145,7 +153,30 @@ class GenericCreateSaver(Creator): folder_path = formatting_data["folderPath"] folder_name = folder_path.rsplit("/", 1)[-1] - workdir = os.path.normpath(os.getenv("AYON_WORKDIR")) + # If the folder path and task do not match the current context then the + # workdir is not just the `AYON_WORKDIR`. Hence, we need to actually + # compute the resulting workdir + if ( + data["folderPath"] == self.create_context.get_current_folder_path() + and data["task"] == self.create_context.get_current_task_name() + ): + workdir = os.path.normpath(os.getenv("AYON_WORKDIR")) + else: + # TODO: Optimize this logic + project_name = self.create_context.get_current_project_name() + project_entity = get_project(project_name) + folder_entity = get_folder_by_path(project_name, + data["folderPath"]) + task_entity = get_task_by_name(project_name, + folder_id=folder_entity["id"], + task_name=data["task"]) + workdir = get_workdir( + project_entity=project_entity, + folder_entity=folder_entity, + task_entity=task_entity, + host_name=self.create_context.host_name, + ) + formatting_data.update({ "workdir": workdir, "frame": "0" * frame_padding, diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_launch_menu_hook.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_launch_menu_hook.py new file mode 100644 index 0000000000..e70d4b844e --- /dev/null +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_launch_menu_hook.py @@ -0,0 +1,36 @@ +import os +from ayon_core.lib import PreLaunchHook +from ayon_core.hosts.fusion import FUSION_HOST_DIR + + +class FusionLaunchMenuHook(PreLaunchHook): + """Launch AYON menu on start of Fusion""" + app_groups = ["fusion"] + order = 9 + + def execute(self): + # Prelaunch hook is optional + settings = self.data["project_settings"][self.host_name] + if not settings["hooks"]["FusionLaunchMenuHook"]["enabled"]: + return + + variant = self.application.name + if variant.isnumeric(): + version = int(variant) + if version < 18: + print("Skipping launch of OpenPype menu on Fusion start " + "because Fusion version below 18.0 does not support " + "/execute argument on launch. " + f"Version detected: {version}") + return + else: + print(f"Application variant is not numeric: {variant}. 
" + "Validation for Fusion version 18+ for /execute " + "prelaunch argument skipped.") + + path = os.path.join(FUSION_HOST_DIR, + "deploy", + "MenuScripts", + "launch_menu.py").replace("\\", "/") + script = f"fusion:RunScript('{path}')" + self.launch_context.launch_args.extend(["/execute", script]) diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py index 10b1c9c45d..1064d0a83a 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py @@ -7,7 +7,7 @@ from ayon_core.hosts.fusion import ( FUSION_VERSIONS_DICT, get_fusion_version, ) -from ayon_core.lib.applications import ( +from ayon_applications import ( PreLaunchHook, LaunchTypes, ApplicationLaunchFailed, diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py index 5e97ae3de1..ef084b0483 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import ( +from ayon_applications import ( PreLaunchHook, LaunchTypes, ApplicationLaunchFailed, diff --git a/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py b/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py index a9db39e24e..ab12078c43 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py @@ -3,7 +3,7 @@ import subprocess import platform import uuid -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class InstallPySideToFusion(PreLaunchHook): diff --git a/client/ayon_core/hosts/harmony/api/pipeline.py b/client/ayon_core/hosts/harmony/api/pipeline.py index d842ccd414..1e3ea0ba21 100644 --- a/client/ayon_core/hosts/harmony/api/pipeline.py +++ b/client/ayon_core/hosts/harmony/api/pipeline.py @@ -13,7 +13,7 @@ from ayon_core.pipeline import ( AVALON_CONTAINER_ID, ) from ayon_core.pipeline.load import get_outdated_containers -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.hosts.harmony import HARMONY_ADDON_ROOT import ayon_core.hosts.harmony.api as harmony @@ -50,7 +50,7 @@ def get_current_context_settings(): """ - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() folder_attributes = folder_entity["attrib"] fps = folder_attributes.get("fps") diff --git a/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py b/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py index bbad14084a..4d38cd09b3 100644 --- a/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py +++ b/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py @@ -6,10 +6,7 @@ from ayon_core.lib import ( get_ayon_launcher_args, is_using_ayon_console, ) -from ayon_core.lib.applications import ( - PreLaunchHook, - LaunchTypes, -) +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.harmony import get_launch_script_path diff --git a/client/ayon_core/hosts/hiero/api/lib.py b/client/ayon_core/hosts/hiero/api/lib.py index ecb3460fb4..8682ff7780 100644 --- a/client/ayon_core/hosts/hiero/api/lib.py +++ b/client/ayon_core/hosts/hiero/api/lib.py @@ -248,8 +248,12 @@ def get_track_items( # collect all available active sequence track 
items if not return_list: sequence = get_current_sequence(name=sequence_name) - # get all available tracks from sequence - tracks = list(sequence.audioTracks()) + list(sequence.videoTracks()) + tracks = [] + if sequence is not None: + # get all available tracks from sequence + tracks.extend(sequence.audioTracks()) + tracks.extend(sequence.videoTracks()) + # loop all tracks for track in tracks: if check_locked and track.isLocked(): diff --git a/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py b/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py index 2985a81317..1fc808fdd1 100644 --- a/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py +++ b/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py @@ -137,7 +137,7 @@ class CreateShotClip(phiero.Creator): "value": ["", "main", "bg", "fg", "bg", "animatic"], "type": "QComboBox", - "label": "pRODUCT Name", + "label": "Product Name", "target": "ui", "toolTip": "chose product name pattern, if is selected, name of track layer will be used", # noqa "order": 0}, @@ -159,7 +159,7 @@ class CreateShotClip(phiero.Creator): "type": "QCheckBox", "label": "Include audio", "target": "tag", - "toolTip": "Process productS with corresponding audio", # noqa + "toolTip": "Process products with corresponding audio", # noqa "order": 3}, "sourceResolution": { "value": False, diff --git a/client/ayon_core/hosts/hiero/plugins/publish/extract_thumbnail.py b/client/ayon_core/hosts/hiero/plugins/publish/extract_thumbnail.py index bcaf5308d9..3599a830d2 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/extract_thumbnail.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/extract_thumbnail.py @@ -4,12 +4,12 @@ import pyblish.api from ayon_core.pipeline import publish -class ExtractThumnail(publish.Extractor): +class ExtractThumbnail(publish.Extractor): """ - Extractor for track item's tumnails + Extractor for track item's tumbnails """ - label = "Extract Thumnail" + label = "Extract Thumbnail" order = pyblish.api.ExtractorOrder families = ["plate", "take"] hosts = ["hiero"] @@ -48,7 +48,7 @@ class ExtractThumnail(publish.Extractor): self.log.debug( "__ thumb_path: `{}`, frame: `{}`".format(thumbnail, thumb_frame)) - self.log.info("Thumnail was generated to: {}".format(thumb_path)) + self.log.info("Thumbnail was generated to: {}".format(thumb_path)) thumb_representation = { 'files': thumb_file, 'stagingDir': staging_dir, diff --git a/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py b/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py index d6fbcd7575..67e1f18cbf 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py @@ -90,7 +90,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if "entity_type" in parent: parent["folder_type"] = parent.pop("entity_type") - asset, asset_name = self._get_folder_data(tag_data) + folder_path, folder_name = self._get_folder_data(tag_data) product_name = tag_data.get("productName") if product_name is None: @@ -98,12 +98,6 @@ class PrecollectInstances(pyblish.api.ContextPlugin): families = [str(f) for f in tag_data["families"]] - # form label - label = "{} -".format(asset) - if asset_name != clip_name: - label += " ({})".format(clip_name) - label += " {}".format(product_name) - # TODO: remove backward compatibility product_name = tag_data.get("productName") if product_name is None: @@ -113,7 +107,7 @@ class 
PrecollectInstances(pyblish.api.ContextPlugin):
         # backward compatibility: product_name should not be missing
         if not product_name:
             self.log.error(
-                "Product name is not defined for: {}".format(asset))
+                "Product name is not defined for: {}".format(folder_path))
 
         # TODO: remove backward compatibility
         product_type = tag_data.get("productType")
@@ -124,15 +118,21 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
         # backward compatibility: product_type should not be missing
         if not product_type:
             self.log.error(
-                "Product type is not defined for: {}".format(asset))
+                "Product type is not defined for: {}".format(folder_path))
+
+        # form label
+        label = "{} -".format(folder_path)
+        if folder_name != clip_name:
+            label += " ({})".format(clip_name)
+        label += " {}".format(product_name)
 
         data.update({
-            "name": "{}_{}".format(asset, product_name),
+            "name": "{}_{}".format(folder_path, product_name),
             "label": label,
-            "folderPath": asset,
-            "asset_name": asset_name,
             "productName": product_name,
             "productType": product_type,
+            "folderPath": folder_path,
+            "asset_name": folder_name,
             "item": track_item,
             "families": families,
             "publish": tag_data["publish"],
@@ -222,19 +222,19 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
         if not hierarchy_data:
             return
 
-        asset = data["folderPath"]
-        asset_name = data["asset_name"]
+        folder_path = data["folderPath"]
+        folder_name = data["asset_name"]
 
         product_type = "shot"
 
         # form label
-        label = "{} -".format(asset)
-        if asset_name != clip_name:
+        label = "{} -".format(folder_path)
+        if folder_name != clip_name:
             label += " ({}) ".format(clip_name)
         label += " {}".format(product_name)
 
         data.update({
-            "name": "{}_{}".format(asset, product_name),
+            "name": "{}_{}".format(folder_path, product_name),
             "label": label,
             "productName": product_name,
             "productType": product_type,
@@ -281,19 +281,19 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
         if not self.test_any_audio(item):
             return
 
-        asset = data["folderPath"]
+        folder_path = data["folderPath"]
         asset_name = data["asset_name"]
 
         product_type = "audio"
 
         # form label
-        label = "{} -".format(asset)
+        label = "{} -".format(folder_path)
         if asset_name != clip_name:
             label += " ({}) ".format(clip_name)
         label += " {}".format(product_name)
 
         data.update({
-            "name": "{}_{}".format(asset, product_name),
+            "name": "{}_{}".format(folder_path, product_name),
             "label": label,
             "productName": product_name,
             "productType": product_type,
diff --git a/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py b/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py
index 8df6cd4261..0b6b34ea6c 100644
--- a/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py
+++ b/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py
@@ -17,8 +17,8 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
     order = pyblish.api.CollectorOrder - 0.491
 
     def process(self, context):
-        asset = context.data["folderPath"]
-        asset_name = asset.split("/")[-1]
+        folder_path = context.data["folderPath"]
+        folder_name = folder_path.split("/")[-1]
 
         active_timeline = hiero.ui.activeSequence()
         project = active_timeline.project()
@@ -62,12 +62,12 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
         product_type = "workfile"
         instance_data = {
             "label": "{} - {}Main".format(
-                asset, product_type),
-            "name": "{}_{}".format(asset_name, product_type),
-            "folderPath": context.data["folderPath"],
+                folder_path, product_type),
+            "name": "{}_{}".format(folder_name, product_type),
+            "folderPath": folder_path,
             # TODO use 'get_product_name'
"productName": "{}{}Main".format( - asset_name, product_type.capitalize() + folder_name, product_type.capitalize() ), "item": project, "productType": product_type, diff --git a/client/ayon_core/hosts/houdini/api/lib.py b/client/ayon_core/hosts/houdini/api/lib.py index a72118c276..972b10ad1b 100644 --- a/client/ayon_core/hosts/houdini/api/lib.py +++ b/client/ayon_core/hosts/houdini/api/lib.py @@ -22,7 +22,7 @@ from ayon_core.pipeline import ( ) from ayon_core.pipeline.create import CreateContext from ayon_core.pipeline.template_data import get_template_data -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.tools.utils import PopupUpdateKeys, SimplePopup from ayon_core.tools.utils.host_tools import get_tool_by_name @@ -39,7 +39,7 @@ def get_folder_fps(folder_entity=None): """Return current folder fps.""" if folder_entity is None: - folder_entity = get_current_project_folder(fields=["attrib.fps"]) + folder_entity = get_current_folder_entity(fields=["attrib.fps"]) return folder_entity["attrib"]["fps"] @@ -243,7 +243,10 @@ def render_rop(ropnode): try: ropnode.render(verbose=verbose, # Allow Deadline to capture completion percentage - output_progress=verbose) + output_progress=verbose, + # Render only this node + # (do not render any of its dependencies) + ignore_inputs=True) except hou.Error as exc: # The hou.Error is not inherited from a Python Exception class, # so we explicitly capture the houdini error, otherwise pyblish @@ -738,7 +741,7 @@ def set_camera_resolution(camera, folder_entity=None): """Apply resolution to camera from folder entity of the publish""" if not folder_entity: - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() resolution = get_resolution_from_folder(folder_entity) @@ -948,7 +951,7 @@ def self_publish(): Firstly, it gets the node and its dependencies. Then, it deactivates all other ROPs - And finaly, it triggers the publishing action. + And finally, it triggers the publishing action. 
""" result, comment = hou.ui.readInput( @@ -1076,4 +1079,4 @@ def prompt_reset_context(): if options["instances"]: update_content_on_context_change() - dialog.deleteLater() \ No newline at end of file + dialog.deleteLater() diff --git a/client/ayon_core/hosts/houdini/api/pipeline.py b/client/ayon_core/hosts/houdini/api/pipeline.py index b9446933ac..4797cf36a0 100644 --- a/client/ayon_core/hosts/houdini/api/pipeline.py +++ b/client/ayon_core/hosts/houdini/api/pipeline.py @@ -39,7 +39,7 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") # Track whether the workfile tool is about to save -ABOUT_TO_SAVE = False +_about_to_save = False class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): @@ -292,8 +292,8 @@ def ls(): def before_workfile_save(event): - global ABOUT_TO_SAVE - ABOUT_TO_SAVE = True + global _about_to_save + _about_to_save = True def before_save(): @@ -307,18 +307,14 @@ def on_save(): # update houdini vars lib.update_houdini_vars_context_dialog() - nodes = lib.get_id_required_nodes() - for node, new_id in lib.generate_ids(nodes): - lib.set_id(node, new_id, overwrite=False) - # We are now starting the actual save directly - global ABOUT_TO_SAVE - ABOUT_TO_SAVE = False + global _about_to_save + _about_to_save = False def on_task_changed(): - global ABOUT_TO_SAVE - if not IS_HEADLESS and ABOUT_TO_SAVE: + global _about_to_save + if not IS_HEADLESS and _about_to_save: # Let's prompt the user to update the context settings or not lib.prompt_reset_context() diff --git a/client/ayon_core/hosts/houdini/hooks/set_paths.py b/client/ayon_core/hosts/houdini/hooks/set_paths.py index 7eb346cc74..4b89ebe944 100644 --- a/client/ayon_core/hosts/houdini/hooks/set_paths.py +++ b/client/ayon_core/hosts/houdini/hooks/set_paths.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class SetPath(PreLaunchHook): diff --git a/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py b/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py index b813f82e2e..4cebd537bb 100644 --- a/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py +++ b/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py @@ -3,7 +3,7 @@ from ayon_core.hosts.houdini.api.lib import ( get_camera_from_container, set_camera_resolution ) -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity class SetCameraResolution(InventoryAction): @@ -19,7 +19,7 @@ class SetCameraResolution(InventoryAction): ) def process(self, containers): - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() for container in containers: node = container["node"] camera = get_camera_from_container(node) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py b/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py index 515ffa6027..f107190f96 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py @@ -2,7 +2,7 @@ import os import re from ayon_core.pipeline import load -from openpype.hosts.houdini.api import pipeline +from ayon_core.hosts.houdini.api import pipeline import hou @@ -103,8 +103,8 @@ class FilePathLoader(load.LoaderPlugin): parm) node.setParmTemplateGroup(parm_template_group) - def 
switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): diff --git a/client/ayon_core/hosts/max/api/lib.py b/client/ayon_core/hosts/max/api/lib.py index 5f13856c9b..48bb15f538 100644 --- a/client/ayon_core/hosts/max/api/lib.py +++ b/client/ayon_core/hosts/max/api/lib.py @@ -11,7 +11,7 @@ import ayon_api from ayon_core.pipeline import get_current_project_name, colorspace from ayon_core.settings import get_project_settings from ayon_core.pipeline.context_tools import ( - get_current_project_folder, + get_current_folder_entity, ) from ayon_core.style import load_stylesheet from pymxs import runtime as rt @@ -222,7 +222,7 @@ def reset_scene_resolution(): contains any information regarding scene resolution. """ - folder_entity = get_current_project_folder( + folder_entity = get_current_folder_entity( fields={"attrib.resolutionWidth", "attrib.resolutionHeight"} ) folder_attributes = folder_entity["attrib"] @@ -243,7 +243,7 @@ def get_frame_range(folder_entiy=None) -> Union[Dict[str, Any], None]: """ # Set frame start/end if folder_entiy is None: - folder_entiy = get_current_project_folder() + folder_entiy = get_current_folder_entity() folder_attributes = folder_entiy["attrib"] frame_start = folder_attributes.get("frameStart") diff --git a/client/ayon_core/hosts/max/api/lib_rendersettings.py b/client/ayon_core/hosts/max/api/lib_rendersettings.py index 8a9881f032..35b6d064c1 100644 --- a/client/ayon_core/hosts/max/api/lib_rendersettings.py +++ b/client/ayon_core/hosts/max/api/lib_rendersettings.py @@ -3,7 +3,7 @@ from pymxs import runtime as rt from ayon_core.lib import Logger from ayon_core.settings import get_project_settings from ayon_core.pipeline import get_current_project_name -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.hosts.max.api.lib import ( set_render_frame_range, @@ -57,7 +57,7 @@ class RenderSettings(object): if not os.path.exists(output_dir): os.makedirs(output_dir) # hard-coded, should be customized in the setting - folder_attributes = get_current_project_folder()["attrib"] + folder_attributes = get_current_folder_entity()["attrib"] # get project resolution width = folder_attributes.get("resolutionWidth") diff --git a/client/ayon_core/hosts/max/api/pipeline.py b/client/ayon_core/hosts/max/api/pipeline.py index 4b1dcc25d3..675f36c24f 100644 --- a/client/ayon_core/hosts/max/api/pipeline.py +++ b/client/ayon_core/hosts/max/api/pipeline.py @@ -240,10 +240,10 @@ def get_previous_loaded_object(container: str): node_list(list): list of nodes which are previously loaded """ node_list = [] - sel_list = rt.getProperty(container.modifiers[0].openPypeData, "sel_list") - for obj in rt.Objects: - if str(obj) in sel_list: - node_list.append(obj) + node_transform_monitor_list = rt.getProperty( + container.modifiers[0].openPypeData, "all_handles") + for node_transform_monitor in node_transform_monitor_list: + node_list.append(node_transform_monitor.node) return node_list diff --git a/client/ayon_core/hosts/max/hooks/force_startup_script.py b/client/ayon_core/hosts/max/hooks/force_startup_script.py index 8ccd658e8f..417f0049ab 100644 --- a/client/ayon_core/hosts/max/hooks/force_startup_script.py +++ b/client/ayon_core/hosts/max/hooks/force_startup_script.py @@ -2,7 +2,7 @@ """Pre-launch to force 3ds max startup script.""" import os from ayon_core.hosts.max 
import MAX_HOST_DIR -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class ForceStartupScript(PreLaunchHook): diff --git a/client/ayon_core/hosts/max/hooks/inject_python.py b/client/ayon_core/hosts/max/hooks/inject_python.py index b1b36e75bd..fc9626ab87 100644 --- a/client/ayon_core/hosts/max/hooks/inject_python.py +++ b/client/ayon_core/hosts/max/hooks/inject_python.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Pre-launch hook to inject python environment.""" import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class InjectPythonPath(PreLaunchHook): diff --git a/client/ayon_core/hosts/max/hooks/set_paths.py b/client/ayon_core/hosts/max/hooks/set_paths.py index 0ee1b0dab7..f066de092e 100644 --- a/client/ayon_core/hosts/max/hooks/set_paths.py +++ b/client/ayon_core/hosts/max/hooks/set_paths.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class SetPath(PreLaunchHook): diff --git a/client/ayon_core/hosts/maya/api/action.py b/client/ayon_core/hosts/maya/api/action.py index baf558036e..d845ac6066 100644 --- a/client/ayon_core/hosts/maya/api/action.py +++ b/client/ayon_core/hosts/maya/api/action.py @@ -4,7 +4,10 @@ from __future__ import absolute_import import pyblish.api import ayon_api -from ayon_core.pipeline.publish import get_errored_instances_from_context +from ayon_core.pipeline.publish import ( + get_errored_instances_from_context, + get_errored_plugins_from_context +) class GenerateUUIDsOnInvalidAction(pyblish.api.Action): @@ -112,20 +115,25 @@ class SelectInvalidAction(pyblish.api.Action): except ImportError: raise ImportError("Current host is not Maya") - errored_instances = get_errored_instances_from_context(context, - plugin=plugin) - # Get the invalid nodes for the plug-ins self.log.info("Finding invalid nodes..") invalid = list() - for instance in errored_instances: - invalid_nodes = plugin.get_invalid(instance) - if invalid_nodes: - if isinstance(invalid_nodes, (list, tuple)): - invalid.extend(invalid_nodes) - else: - self.log.warning("Plug-in returned to be invalid, " - "but has no selectable nodes.") + if issubclass(plugin, pyblish.api.ContextPlugin): + errored_plugins = get_errored_plugins_from_context(context) + if plugin in errored_plugins: + invalid = plugin.get_invalid(context) + else: + errored_instances = get_errored_instances_from_context( + context, plugin=plugin + ) + for instance in errored_instances: + invalid_nodes = plugin.get_invalid(instance) + if invalid_nodes: + if isinstance(invalid_nodes, (list, tuple)): + invalid.extend(invalid_nodes) + else: + self.log.warning("Plug-in returned to be invalid, " + "but has no selectable nodes.") # Ensure unique (process each node only once) invalid = list(set(invalid)) diff --git a/client/ayon_core/hosts/maya/api/customize.py b/client/ayon_core/hosts/maya/api/customize.py index 4db8819ff5..16255f69ba 100644 --- a/client/ayon_core/hosts/maya/api/customize.py +++ b/client/ayon_core/hosts/maya/api/customize.py @@ -113,7 +113,9 @@ def override_toolbox_ui(): annotation="Look Manager", label="Look Manager", image=os.path.join(icons, "lookmanager.png"), - command=show_look_assigner, + command=lambda: show_look_assigner( + parent=parent_widget + ), width=icon_size, height=icon_size, parent=parent diff --git a/client/ayon_core/hosts/maya/api/lib.py 
b/client/ayon_core/hosts/maya/api/lib.py index b18d3a0c33..436c5df26c 100644 --- a/client/ayon_core/hosts/maya/api/lib.py +++ b/client/ayon_core/hosts/maya/api/lib.py @@ -37,7 +37,7 @@ from ayon_core.pipeline import ( AYON_CONTAINER_ID, ) from ayon_core.lib import NumberDef -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_task_entity from ayon_core.pipeline.create import CreateContext from ayon_core.lib.profiles_filtering import filter_profiles @@ -1876,18 +1876,9 @@ def list_looks(project_name, folder_id): list[dict[str, Any]]: List of look products. """ - # # get all products with look leading in - # the name associated with the asset - # TODO this should probably look for product type 'look' instead of - # checking product name that can not start with product type - product_entities = ayon_api.get_products( - project_name, folder_ids=[folder_id] - ) - return [ - product_entity - for product_entity in product_entities - if product_entity["name"].startswith("look") - ] + return list(ayon_api.get_products( + project_name, folder_ids=[folder_id], product_types={"look"} + )) def assign_look_by_version(nodes, version_id): @@ -1906,12 +1897,15 @@ def assign_look_by_version(nodes, version_id): project_name = get_current_project_name() # Get representations of shader file and relationships - look_representation = ayon_api.get_representation_by_name( - project_name, "ma", version_id - ) - json_representation = ayon_api.get_representation_by_name( - project_name, "json", version_id - ) + representations = list(ayon_api.get_representations( + project_name=project_name, + representation_names={"ma", "json"}, + version_ids=[version_id] + )) + look_representation = next( + repre for repre in representations if repre["name"] == "ma") + json_representation = next( + repre for repre in representations if repre["name"] == "json") # See if representation is already loaded, if so reuse it. host = registered_host() @@ -1948,7 +1942,7 @@ def assign_look_by_version(nodes, version_id): apply_shaders(relationships, shader_nodes, nodes) -def assign_look(nodes, product_name="lookDefault"): +def assign_look(nodes, product_name="lookMain"): """Assigns a look to a node. 
Optimizes the nodes by grouping by folder id and finding @@ -1981,14 +1975,10 @@ def assign_look(nodes, product_name="lookDefault"): product_entity["id"] for product_entity in product_entities_by_folder_id.values() } - last_version_entities = ayon_api.get_last_versions( + last_version_entities_by_product_id = ayon_api.get_last_versions( project_name, product_ids ) - last_version_entities_by_product_id = { - last_version_entity["productId"]: last_version_entity - for last_version_entity in last_version_entities - } for folder_id, asset_nodes in grouped.items(): product_entity = product_entities_by_folder_id.get(folder_id) @@ -2125,22 +2115,6 @@ def get_related_sets(node): """ - # Ignore specific suffices - ignore_suffices = ["out_SET", "controls_SET", "_INST", "_CON"] - - # Default nodes to ignore - defaults = {"defaultLightSet", "defaultObjectSet"} - - # Ids to ignore - ignored = { - AVALON_INSTANCE_ID, - AVALON_CONTAINER_ID, - AYON_INSTANCE_ID, - AYON_CONTAINER_ID, - } - - view_sets = get_isolate_view_sets() - sets = cmds.listSets(object=node, extendToShape=False) if not sets: return [] @@ -2151,6 +2125,14 @@ def get_related_sets(node): # returned by `cmds.listSets(allSets=True)` sets = cmds.ls(sets) + # Ids to ignore + ignored = { + AVALON_INSTANCE_ID, + AVALON_CONTAINER_ID, + AYON_INSTANCE_ID, + AYON_CONTAINER_ID, + } + # Ignore `avalon.container` sets = [ s for s in sets @@ -2159,21 +2141,31 @@ def get_related_sets(node): or cmds.getAttr(f"{s}.id") not in ignored ) ] + if not sets: + return sets # Exclude deformer sets (`type=2` for `maya.cmds.listSets`) - deformer_sets = cmds.listSets(object=node, - extendToShape=False, - type=2) or [] - deformer_sets = set(deformer_sets) # optimize lookup - sets = [s for s in sets if s not in deformer_sets] + exclude_sets = cmds.listSets(object=node, + extendToShape=False, + type=2) or [] + exclude_sets = set(exclude_sets) # optimize lookup + + # Default nodes to ignore + exclude_sets.update({"defaultLightSet", "defaultObjectSet"}) + + # Filter out the sets to exclude + sets = [s for s in sets if s not in exclude_sets] # Ignore when the set has a specific suffix - sets = [s for s in sets if not any(s.endswith(x) for x in ignore_suffices)] + ignore_suffices = ("out_SET", "controls_SET", "_INST", "_CON") + sets = [s for s in sets if not s.endswith(ignore_suffices)] + if not sets: + return sets # Ignore viewport filter view sets (from isolate select and # viewports) + view_sets = get_isolate_view_sets() sets = [s for s in sets if s not in view_sets] - sets = [s for s in sets if s not in defaults] return sets @@ -2444,12 +2436,10 @@ def set_scene_fps(fps, update=True): cmds.currentUnit(time=unit, updateAnimation=update) # Set time slider data back to previous state - cmds.playbackOptions(edit=True, minTime=start_frame) - cmds.playbackOptions(edit=True, maxTime=end_frame) - - # Set animation data - cmds.playbackOptions(edit=True, animationStartTime=animation_start) - cmds.playbackOptions(edit=True, animationEndTime=animation_end) + cmds.playbackOptions(minTime=start_frame, + maxTime=end_frame, + animationStartTime=animation_start, + animationEndTime=animation_end) cmds.currentTime(current_frame, edit=True, update=True) @@ -2525,7 +2515,7 @@ def get_fps_for_current_context(): def get_frame_range(include_animation_range=False): - """Get the current folder frame range and handles. + """Get the current task frame range and handles. 
Args: include_animation_range (bool, optional): Whether to include @@ -2533,25 +2523,34 @@ def get_frame_range(include_animation_range=False): range of the timeline. It is excluded by default. Returns: - dict: Folder's expected frame range values. + dict: Task's expected frame range values. """ # Set frame start/end project_name = get_current_project_name() folder_path = get_current_folder_path() - folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) - folder_attributes = folder_entity["attrib"] + task_name = get_current_task_name() - frame_start = folder_attributes.get("frameStart") - frame_end = folder_attributes.get("frameEnd") + folder_entity = ayon_api.get_folder_by_path( + project_name, + folder_path, + fields={"id"}) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + + task_attributes = task_entity["attrib"] + + frame_start = task_attributes.get("frameStart") + frame_end = task_attributes.get("frameEnd") if frame_start is None or frame_end is None: cmds.warning("No edit information found for '{}'".format(folder_path)) return - handle_start = folder_attributes.get("handleStart") or 0 - handle_end = folder_attributes.get("handleEnd") or 0 + handle_start = task_attributes.get("handleStart") or 0 + handle_end = task_attributes.get("handleEnd") or 0 frame_range = { "frameStart": frame_start, @@ -2565,14 +2564,10 @@ def get_frame_range(include_animation_range=False): # Some usages of this function use the full dictionary to define # instance attributes for which we want to exclude the animation # keys. That is why these are excluded by default. - task_name = get_current_task_name() + settings = get_project_settings(project_name) - task_entity = ayon_api.get_task_by_name( - project_name, folder_entity["id"], task_name - ) - task_type = None - if task_entity: - task_type = task_entity["taskType"] + + task_type = task_entity["taskType"] include_handles_settings = settings["maya"]["include_handles"] @@ -2634,21 +2629,21 @@ def reset_frame_range(playback=True, render=True, fps=True): def reset_scene_resolution(): """Apply the scene resolution from the project definition - scene resolution can be overwritten by an folder if the folder.attrib - contains any information regarding scene resolution . + The scene resolution will be retrieved from the current task entity's + attributes. Returns: None """ - folder_attributes = get_current_project_folder()["attrib"] + task_attributes = get_current_task_entity(fields={"attrib"})["attrib"] # Set resolution - width = folder_attributes.get("resolutionWidth", 1920) - height = folder_attributes.get("resolutionHeight", 1080) - pixelAspect = folder_attributes.get("pixelAspect", 1) + width = task_attributes.get("resolutionWidth", 1920) + height = task_attributes.get("resolutionHeight", 1080) + pixel_aspect = task_attributes.get("pixelAspect", 1) - set_scene_resolution(width, height, pixelAspect) + set_scene_resolution(width, height, pixel_aspect) def set_context_settings( @@ -3134,7 +3129,7 @@ def load_capture_preset(data): return options -def get_attr_in_layer(attr, layer): +def get_attr_in_layer(attr, layer, as_string=True): """Return attribute value in specified renderlayer. Same as cmds.getAttr but this gets the attribute's value in a @@ -3152,6 +3147,7 @@ def get_attr_in_layer(attr, layer): Args: attr (str): attribute name, ex. 
"node.attribute" layer (str): layer name + as_string (bool): whether attribute should convert to a string value Returns: The return value from `maya.cmds.getAttr` @@ -3161,7 +3157,8 @@ def get_attr_in_layer(attr, layer): try: if cmds.mayaHasRenderSetup(): from . import lib_rendersetup - return lib_rendersetup.get_attr_in_layer(attr, layer) + return lib_rendersetup.get_attr_in_layer( + attr, layer, as_string=as_string) except AttributeError: pass @@ -3169,7 +3166,7 @@ def get_attr_in_layer(attr, layer): current_layer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True) if layer == current_layer: - return cmds.getAttr(attr) + return cmds.getAttr(attr, asString=as_string) connections = cmds.listConnections(attr, plugs=True, @@ -3220,7 +3217,7 @@ def get_attr_in_layer(attr, layer): value *= conversion return value - return cmds.getAttr(attr) + return cmds.getAttr(attr, asString=as_string) def fix_incompatible_containers(): @@ -3249,33 +3246,46 @@ def update_content_on_context_change(): """ This will update scene content to match new folder on context change """ - scene_sets = cmds.listSets(allSets=True) - folder_entity = get_current_project_folder() - folder_attributes = folder_entity["attrib"] - new_folder_path = folder_entity["path"] - for s in scene_sets: - try: - if cmds.getAttr("{}.id".format(s)) in { - AYON_INSTANCE_ID, AVALON_INSTANCE_ID - }: - attr = cmds.listAttr(s) - print(s) - if "folderPath" in attr: - print( - " - setting folder to: [ {} ]".format(new_folder_path) - ) - cmds.setAttr( - "{}.folderPath".format(s), - new_folder_path, type="string" - ) - if "frameStart" in attr: - cmds.setAttr("{}.frameStart".format(s), - folder_attributes["frameStart"]) - if "frameEnd" in attr: - cmds.setAttr("{}.frameEnd".format(s), - folder_attributes["frameEnd"],) - except ValueError: - pass + + host = registered_host() + create_context = CreateContext(host) + folder_entity = get_current_task_entity(fields={"attrib"}) + + instance_values = { + "folderPath": create_context.get_current_folder_path(), + "task": create_context.get_current_task_name(), + } + creator_attribute_values = { + "frameStart": folder_entity["attrib"]["frameStart"], + "frameEnd": folder_entity["attrib"]["frameEnd"], + } + + has_changes = False + for instance in create_context.instances: + for key, value in instance_values.items(): + if key not in instance or instance[key] == value: + continue + + # Update instance value + print(f"Updating {instance.product_name} {key} to: {value}") + instance[key] = value + has_changes = True + + creator_attributes = instance.creator_attributes + for key, value in creator_attribute_values.items(): + if ( + key not in creator_attributes + or creator_attributes[key] == value + ): + continue + + # Update instance creator attribute value + print(f"Updating {instance.product_name} {key} to: {value}") + instance[key] = value + has_changes = True + + if has_changes: + create_context.save_changes() def show_message(title, msg): @@ -4009,17 +4019,26 @@ def len_flattened(components): return n -def get_all_children(nodes): +def get_all_children(nodes, ignore_intermediate_objects=False): """Return all children of `nodes` including each instanced child. Using maya.cmds.listRelatives(allDescendents=True) includes only the first instance. As such, this function acts as an optimal replacement with a focus on a fast query. + Args: + nodes (iterable): List of nodes to get children for. + ignore_intermediate_objects (bool): Ignore any children that + are intermediate objects. 
+ + Returns: + set: Children of input nodes. + """ sel = OpenMaya.MSelectionList() traversed = set() iterator = OpenMaya.MItDag(OpenMaya.MItDag.kDepthFirst) + fn_dag = OpenMaya.MFnDagNode() for node in nodes: if node in traversed: @@ -4036,6 +4055,13 @@ def get_all_children(nodes): iterator.next() # noqa: B305 while not iterator.isDone(): + if ignore_intermediate_objects: + fn_dag.setObject(iterator.currentItem()) + if fn_dag.isIntermediateObject: + iterator.prune() + iterator.next() # noqa: B305 + continue + path = iterator.fullPathName() if path in traversed: @@ -4046,7 +4072,7 @@ def get_all_children(nodes): traversed.add(path) iterator.next() # noqa: B305 - return list(traversed) + return traversed def get_capture_preset( diff --git a/client/ayon_core/hosts/maya/api/lib_renderproducts.py b/client/ayon_core/hosts/maya/api/lib_renderproducts.py index 7f26145e1d..832d1c21c2 100644 --- a/client/ayon_core/hosts/maya/api/lib_renderproducts.py +++ b/client/ayon_core/hosts/maya/api/lib_renderproducts.py @@ -297,7 +297,7 @@ class ARenderProducts: """ return self._get_attr("defaultRenderGlobals", attribute) - def _get_attr(self, node_attr, attribute=None): + def _get_attr(self, node_attr, attribute=None, as_string=True): """Return the value of the attribute in the renderlayer For readability this allows passing in the attribute in two ways. @@ -317,7 +317,7 @@ class ARenderProducts: else: plug = "{}.{}".format(node_attr, attribute) - return lib.get_attr_in_layer(plug, layer=self.layer) + return lib.get_attr_in_layer(plug, layer=self.layer, as_string=as_string) @staticmethod def extract_separator(file_prefix): @@ -1133,9 +1133,24 @@ class RenderProductsRedshift(ARenderProducts): aovs = list(set(aovs) - set(ref_aovs)) products = [] + global_aov_enabled = bool( + self._get_attr("redshiftOptions.aovGlobalEnableMode", as_string=False) + ) + colorspace = lib.get_color_management_output_transform() + if not global_aov_enabled: + # only beauty output + for camera in cameras: + products.insert(0, + RenderProduct(productName="", + ext=ext, + multipart=self.multipart, + camera=camera, + colorspace=colorspace)) + return products + light_groups_enabled = False has_beauty_aov = False - colorspace = lib.get_color_management_output_transform() + for aov in aovs: enabled = self._get_attr(aov, "enabled") if not enabled: diff --git a/client/ayon_core/hosts/maya/api/lib_rendersettings.py b/client/ayon_core/hosts/maya/api/lib_rendersettings.py index 905e8c69af..f9e243146a 100644 --- a/client/ayon_core/hosts/maya/api/lib_rendersettings.py +++ b/client/ayon_core/hosts/maya/api/lib_rendersettings.py @@ -7,7 +7,7 @@ from ayon_core.lib import Logger from ayon_core.settings import get_project_settings from ayon_core.pipeline import CreatorError, get_current_project_name -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.hosts.maya.api.lib import reset_frame_range @@ -77,7 +77,7 @@ class RenderSettings(object): renderer = cmds.getAttr( 'defaultRenderGlobals.currentRenderer').lower() - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() folder_attributes = folder_entity["attrib"] # project_settings/maya/create/CreateRender/aov_separator try: diff --git a/client/ayon_core/hosts/maya/api/lib_rendersetup.py b/client/ayon_core/hosts/maya/api/lib_rendersetup.py index c2b5ec843c..6dca8eb6dd 100644 --- a/client/ayon_core/hosts/maya/api/lib_rendersetup.py +++ 
b/client/ayon_core/hosts/maya/api/lib_rendersetup.py @@ -77,7 +77,7 @@ def get_rendersetup_layer(layer): if conn.endswith(".legacyRenderLayer")), None) -def get_attr_in_layer(node_attr, layer): +def get_attr_in_layer(node_attr, layer, as_string=True): """Return attribute value in Render Setup layer. This will only work for attributes which can be @@ -124,7 +124,7 @@ def get_attr_in_layer(node_attr, layer): node = history_overrides[-1] if history_overrides else override node_attr_ = node + ".original" - return get_attribute(node_attr_, asString=True) + return get_attribute(node_attr_, asString=as_string) layer = get_rendersetup_layer(layer) rs = renderSetup.instance() @@ -144,7 +144,7 @@ def get_attr_in_layer(node_attr, layer): # we will let it error out. rs.switchToLayer(current_layer) - return get_attribute(node_attr, asString=True) + return get_attribute(node_attr, asString=as_string) overrides = get_attr_overrides(node_attr, layer) default_layer_value = get_default_layer_value(node_attr) diff --git a/client/ayon_core/hosts/maya/api/pipeline.py b/client/ayon_core/hosts/maya/api/pipeline.py index 2be452a22a..eb46088ecd 100644 --- a/client/ayon_core/hosts/maya/api/pipeline.py +++ b/client/ayon_core/hosts/maya/api/pipeline.py @@ -68,7 +68,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") AVALON_CONTAINERS = ":AVALON_CONTAINERS" # Track whether the workfile tool is about to save -ABOUT_TO_SAVE = False +_about_to_save = False class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): @@ -585,8 +585,8 @@ def on_save(): lib.set_id(node, new_id, overwrite=False) # We are now starting the actual save directly - global ABOUT_TO_SAVE - ABOUT_TO_SAVE = False + global _about_to_save + _about_to_save = False def on_open(): @@ -653,12 +653,8 @@ def on_task_changed(): "Can't set project for new context because path does not exist: {}" ).format(workdir)) - with lib.suspended_refresh(): - lib.set_context_settings() - lib.update_content_on_context_change() - - global ABOUT_TO_SAVE - if not lib.IS_HEADLESS and ABOUT_TO_SAVE: + global _about_to_save + if not lib.IS_HEADLESS and _about_to_save: # Let's prompt the user to update the context settings or not lib.prompt_reset_context() @@ -676,8 +672,8 @@ def before_workfile_save(event): if workdir_path: create_workspace_mel(workdir_path, project_name) - global ABOUT_TO_SAVE - ABOUT_TO_SAVE = True + global _about_to_save + _about_to_save = True def workfile_save_before_xgen(event): diff --git a/client/ayon_core/hosts/maya/api/workfile_template_builder.py b/client/ayon_core/hosts/maya/api/workfile_template_builder.py index cb25a722f0..75386d7e64 100644 --- a/client/ayon_core/hosts/maya/api/workfile_template_builder.py +++ b/client/ayon_core/hosts/maya/api/workfile_template_builder.py @@ -286,7 +286,7 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): if not container: return - roots = cmds.sets(container, q=True) + roots = cmds.sets(container, q=True) or [] ref_node = None try: ref_node = get_reference_node(roots) diff --git a/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py b/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py index ed294da125..45785ac354 100644 --- a/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py +++ b/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class MayaPreAutoLoadPlugins(PreLaunchHook): diff --git 
a/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py b/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py index 3fd81ceff4..683b4c59c7 100644 --- a/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py +++ b/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.maya.lib import create_workspace_mel diff --git a/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py b/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py index 6bf678474f..a54f17c6c6 100644 --- a/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py +++ b/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class MayaPreOpenWorkfilePostInitialization(PreLaunchHook): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_render.py b/client/ayon_core/hosts/maya/plugins/create/create_render.py index 213d5b543e..e5a8d4dbd8 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_render.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_render.py @@ -40,8 +40,15 @@ class CreateRenderlayer(plugin.RenderlayerCreator): def create(self, product_name, instance_data, pre_create_data): # Only allow a single render instance to exist if self._get_singleton_node(): - raise CreatorError("A Render instance already exists - only " - "one can be configured.") + raise CreatorError( + "A Render instance already exists - only one can be " + "configured.\n\n" + "To render multiple render layers, create extra Render Setup " + "Layers via Maya's Render Setup UI.\n" + "Then refresh the publisher to detect the new layers for " + "rendering.\n\n" + "With a render instance present all Render Setup layers in " + "your workfile are renderable instances.") # Apply default project render settings on create if self.render_settings.get("apply_render_settings"): diff --git a/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py b/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py index 6f20e677f0..7096f86e35 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py @@ -9,7 +9,9 @@ instance. 
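# Sketch (illustrative, not part of the patch): launch hooks in this patch now
# import from the standalone ayon_applications addon instead of
# ayon_core.lib.applications. A minimal hook under the new import path; the
# class name, filter attributes and body are assumptions for the example only:
from ayon_applications import PreLaunchHook, LaunchTypes

class ExamplePreLaunchHook(PreLaunchHook):
    """Hypothetical pre-launch hook, shown only to illustrate the import."""

    app_groups = {"maya"}
    launch_types = {LaunchTypes.local}

    def execute(self):
        self.log.info("Example pre-launch hook running.")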
import json import sys import six +import contextlib +from ayon_core.lib import BoolDef, EnumDef from ayon_core.pipeline import ( load, get_representation_path @@ -21,6 +23,31 @@ from maya import cmds import maya.app.renderSetup.model.renderSetup as renderSetup +@contextlib.contextmanager +def mark_all_imported(enabled): + """Mark all imported nodes accepted by removing the `imported` attribute""" + if not enabled: + yield + return + + node_types = cmds.pluginInfo("renderSetup", query=True, dependNode=True) + + # Get node before load, then we can disable `imported` + # attribute on all new render setup layers after import + before = cmds.ls(type=node_types, long=True) + try: + yield + finally: + after = cmds.ls(type=node_types, long=True) + for node in (node for node in after if node not in before): + if cmds.attributeQuery("imported", + node=node, + exists=True): + plug = "{}.imported".format(node) + if cmds.getAttr(plug): + cmds.deleteAttr(plug) + + class RenderSetupLoader(load.LoaderPlugin): """Load json preset for RenderSetup overwriting current one.""" @@ -32,48 +59,79 @@ class RenderSetupLoader(load.LoaderPlugin): icon = "tablet" color = "orange" + options = [ + BoolDef("accept_import", + label="Accept import on load", + tooltip=( + "By default importing or pasting Render Setup collections " + "will display them italic in the Render Setup list.\nWith " + "this enabled the load will directly mark the import " + "'accepted' and remove the italic view." + ), + default=True), + BoolDef("load_managed", + label="Load Managed", + tooltip=( + "Containerize the rendersetup on load so it can be " + "'updated' later." + ), + default=True), + EnumDef("import_mode", + label="Import mode", + items={ + renderSetup.DECODE_AND_OVERWRITE: ( + "Flush existing render setup and " + "add without any namespace" + ), + renderSetup.DECODE_AND_MERGE: ( + "Merge with the existing render setup objects and " + "rename the unexpected objects" + ), + renderSetup.DECODE_AND_RENAME: ( + "Renaming all decoded render setup objects to not " + "conflict with the existing render setup" + ), + }, + default=renderSetup.DECODE_AND_OVERWRITE) + ] + def load(self, context, name, namespace, data): """Load RenderSetup settings.""" - # from ayon_core.hosts.maya.api.lib import namespaced - - folder_name = context["folder"]["name"] - namespace = namespace or lib.unique_namespace( - folder_name + "_", - prefix="_" if folder_name[0].isdigit() else "", - suffix="_", - ) path = self.filepath_from_context(context) + + accept_import = data.get("accept_import", True) + import_mode = data.get("import_mode", renderSetup.DECODE_AND_OVERWRITE) + self.log.info(">>> loading json [ {} ]".format(path)) - with open(path, "r") as file: - renderSetup.instance().decode( - json.load(file), renderSetup.DECODE_AND_OVERWRITE, None) + with mark_all_imported(accept_import): + with open(path, "r") as file: + renderSetup.instance().decode( + json.load(file), import_mode, None) - nodes = [] - null = cmds.sets(name="null_SET", empty=True) - nodes.append(null) + if data.get("load_managed", True): + self.log.info(">>> containerising [ {} ]".format(name)) + folder_name = context["folder"]["name"] + namespace = namespace or lib.unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) - self[:] = nodes - if not nodes: - return - - self.log.info(">>> containerising [ {} ]".format(name)) - return containerise( - name=name, - namespace=namespace, - nodes=nodes, - context=context, - loader=self.__class__.__name__) + 
return containerise( + name=name, + namespace=namespace, + nodes=[], + context=context, + loader=self.__class__.__name__) def remove(self, container): """Remove RenderSetup settings instance.""" - from maya import cmds - container_name = container["objectName"] self.log.info("Removing '%s' from Maya.." % container["name"]) - container_content = cmds.sets(container_name, query=True) + container_content = cmds.sets(container_name, query=True) or [] nodes = cmds.ls(container_content, long=True) nodes.append(container_name) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py b/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py index 2d621353e6..0db89bee31 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py @@ -46,11 +46,18 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin): self.log.debug("data: {}".format(instance.data)) def get_hierarchy(self, nodes): - """Return nodes with all their children""" + """Return nodes with all their children. + + Arguments: + nodes (List[str]): List of nodes to collect children hierarchy for + + Returns: + list: Input nodes with their children hierarchy + + """ nodes = cmds.ls(nodes, long=True) if not nodes: return [] - children = get_all_children(nodes) - # Make sure nodes merged with children only - # contains unique entries - return list(set(nodes + children)) + + children = get_all_children(nodes, ignore_intermediate_objects=True) + return list(children.union(nodes)) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py b/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py index 94fcc834e1..93b46c511b 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py @@ -1,5 +1,3 @@ -import json - from maya import cmds import pyblish.api @@ -11,18 +9,24 @@ class CollectFileDependencies(pyblish.api.ContextPlugin): label = "Collect File Dependencies" order = pyblish.api.CollectorOrder - 0.49 hosts = ["maya"] + families = ["renderlayer"] + + @classmethod + def apply_settings(cls, project_settings, system_settings): + # Disable plug-in if not used for deadline submission anyway + settings = project_settings["deadline"]["publish"]["MayaSubmitDeadline"] # noqa + cls.enabled = settings.get("asset_dependencies", True) def process(self, context): - dependencies = [] + dependencies = set() for node in cmds.ls(type="file"): path = cmds.getAttr("{}.{}".format(node, "fileTextureName")) if path not in dependencies: - dependencies.append(path) + dependencies.add(path) for node in cmds.ls(type="AlembicNode"): path = cmds.getAttr("{}.{}".format(node, "abc_File")) if path not in dependencies: - dependencies.append(path) + dependencies.add(path) - context.data["fileDependencies"] = dependencies - self.log.debug(json.dumps(dependencies, indent=4)) + context.data["fileDependencies"] = list(dependencies) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py b/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py index 85be15bb7b..774c217cfd 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py @@ -48,15 +48,15 @@ class CollectNewInstances(pyblish.api.InstancePlugin): # Collect members members = cmds.ls(members, 
long=True) or [] + # Collect full hierarchy dag_members = cmds.ls(members, type="dagNode", long=True) - children = get_all_children(dag_members) - children = cmds.ls(children, noIntermediate=True, long=True) - parents = ( - self.get_all_parents(members) - if creator_attributes.get("includeParentHierarchy", True) - else [] - ) - members_hierarchy = list(set(members + children + parents)) + children = get_all_children(dag_members, + ignore_intermediate_objects=True) + + members_hierarchy = set(members) + members_hierarchy.update(children) + if creator_attributes.get("includeParentHierarchy", True): + members_hierarchy.update(self.get_all_parents(dag_members)) instance[:] = members_hierarchy @@ -97,16 +97,16 @@ class CollectNewInstances(pyblish.api.InstancePlugin): """Get all parents by using string operations (optimization) Args: - nodes (list): the nodes which are found in the objectSet + nodes (iterable): the nodes which are found in the objectSet Returns: - list + set """ - parents = [] + parents = set() for node in nodes: splitted = node.split("|") items = ["|".join(splitted[0:i]) for i in range(2, len(splitted))] - parents.extend(items) + parents.update(items) - return list(set(parents)) + return parents diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_render.py b/client/ayon_core/hosts/maya/plugins/publish/collect_render.py index ff959afabc..21095935a2 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_render.py @@ -1,24 +1,19 @@ # -*- coding: utf-8 -*- """Collect render data. -This collector will go through render layers in maya and prepare all data -needed to create instances and their representations for submission and -publishing on farm. +This collector will go through renderlayer instances and prepare all data +needed to detect the expected rendered files for a layer, with resolution, +frame ranges and collects the data needed for publishing on the farm. Requires: instance -> families - instance -> setMembers - instance -> folderPath context -> currentFile - context -> workspaceDir context -> user -Optional: - Provides: instance -> label - instance -> productName + instance -> subset instance -> attachTo instance -> setMembers instance -> publish @@ -26,6 +21,8 @@ Provides: instance -> frameEnd instance -> byFrameStep instance -> renderer + instance -> family + instance -> asset instance -> time instance -> author instance -> source @@ -71,8 +68,6 @@ class CollectMayaRender(pyblish.api.InstancePlugin): # TODO: Re-add force enable of workfile instance? # TODO: Re-add legacy layer support with LAYER_ prefix but in Creator - # TODO: Set and collect active state of RenderLayer in Creator using - # renderlayer.isRenderable() context = instance.context layer = instance.data["transientData"]["layer"] @@ -112,7 +107,13 @@ class CollectMayaRender(pyblish.api.InstancePlugin): except UnsupportedRendererException as exc: raise KnownPublishError(exc) render_products = layer_render_products.layer_data.products - assert render_products, "no render products generated" + if not render_products: + self.log.error( + "No render products generated for '%s'. 
You might not have " + "any render camera in the renderlayer or render end frame is " + "lower than start frame.", + instance.name + ) expected_files = [] multipart = False for product in render_products: @@ -130,16 +131,21 @@ class CollectMayaRender(pyblish.api.InstancePlugin): }) has_cameras = any(product.camera for product in render_products) - assert has_cameras, "No render cameras found." - - self.log.debug("multipart: {}".format( - multipart)) - assert expected_files, "no file names were generated, this is a bug" - self.log.debug( - "expected files: {}".format( - json.dumps(expected_files, indent=4, sort_keys=True) + if render_products and not has_cameras: + self.log.error( + "No render cameras found for: %s", + instance ) - ) + if not expected_files: + self.log.warning( + "No file names were generated, this is a bug.") + + for render_product in render_products: + self.log.debug(render_product) + self.log.debug("multipart: {}".format(multipart)) + self.log.debug("expected files: {}".format( + json.dumps(expected_files, indent=4, sort_keys=True) + )) # if we want to attach render to product, check if we have AOV's # in expectedFiles. If so, raise error as we cannot attach AOV @@ -151,14 +157,14 @@ class CollectMayaRender(pyblish.api.InstancePlugin): ) # append full path - aov_dict = {} image_directory = os.path.join( cmds.workspace(query=True, rootDirectory=True), cmds.workspace(fileRuleEntry="images") ) # replace relative paths with absolute. Render products are # returned as list of dictionaries. - publish_meta_path = None + publish_meta_path = "NOT-SET" + aov_dict = {} for aov in expected_files: full_paths = [] aov_first_key = list(aov.keys())[0] @@ -169,14 +175,6 @@ class CollectMayaRender(pyblish.api.InstancePlugin): publish_meta_path = os.path.dirname(full_path) aov_dict[aov_first_key] = full_paths full_exp_files = [aov_dict] - self.log.debug(full_exp_files) - - if publish_meta_path is None: - raise KnownPublishError("Unable to detect any expected output " - "images for: {}. Make sure you have a " - "renderable camera and a valid frame " - "range set for your renderlayer." 
- "".format(instance.name)) frame_start_render = int(self.get_render_attribute( "startFrame", layer=layer_name)) @@ -222,7 +220,8 @@ class CollectMayaRender(pyblish.api.InstancePlugin): common_publish_meta_path = "/" + common_publish_meta_path self.log.debug( - "Publish meta path: {}".format(common_publish_meta_path)) + "Publish meta path: {}".format(common_publish_meta_path) + ) # Get layer specific settings, might be overrides colorspace_data = lib.get_color_management_preferences() diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py b/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py index 19825b769c..4b293b5785 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py @@ -5,7 +5,8 @@ from maya import cmds from ayon_core.pipeline import publish -class ExtractGPUCache(publish.Extractor): +class ExtractGPUCache(publish.Extractor, + publish.OptionalPyblishPluginMixin): """Extract the content of the instance to a GPU cache file.""" label = "GPU Cache" @@ -20,6 +21,9 @@ class ExtractGPUCache(publish.Extractor): useBaseTessellation = True def process(self, instance): + if not self.is_active(instance.data): + return + cmds.loadPlugin("gpuCache", quiet=True) staging_dir = self.staging_dir(instance) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py b/client/ayon_core/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py index 4590c53931..045e22545c 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py @@ -1,14 +1,18 @@ +import inspect + import pyblish.api from maya import cmds from ayon_core.pipeline.publish import ( context_plugin_should_run, + PublishValidationError, OptionalPyblishPluginMixin ) + class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin, OptionalPyblishPluginMixin): - """Validate if current render layer has a renderable camera + """Validate if current render layer has a renderable camera. There is a bug in Redshift which occurs when the current render layer at file open has no renderable camera. The error raised is as follows: @@ -32,8 +36,39 @@ class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin, if not context_plugin_should_run(self, context): return - layer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True) + # This validator only makes sense when publishing renderlayer instances + # with Redshift. We skip validation if there isn't any. 
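# Sketch (illustrative, not part of the patch): ExtractGPUCache above now uses
# the standard opt-in pattern - mix in OptionalPyblishPluginMixin and return
# early from process() when the artist disabled the plugin on the instance.
# The same pattern in isolation (plugin name and family are made up):
from ayon_core.pipeline import publish

class ExtractSomethingOptional(publish.Extractor,
                               publish.OptionalPyblishPluginMixin):
    label = "Hypothetical Optional Extractor"
    families = ["model"]
    optional = True

    def process(self, instance):
        if not self.is_active(instance.data):
            return
        self.log.debug("Running optional extraction for %s", instance.name)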
+ if not any(self.is_active_redshift_render_instance(instance) + for instance in context): + return + cameras = cmds.ls(type="camera", long=True) renderable = any(c for c in cameras if cmds.getAttr(c + ".renderable")) - assert renderable, ("Current render layer '%s' has no renderable " - "camera" % layer) + if not renderable: + layer = cmds.editRenderLayerGlobals(query=True, + currentRenderLayer=True) + raise PublishValidationError( + "Current render layer '{}' has no renderable camera".format( + layer + ), + description=inspect.getdoc(self) + ) + + @staticmethod + def is_active_redshift_render_instance(instance) -> bool: + """Return whether instance is an active renderlayer instance set to + render with Redshift renderer.""" + if not instance.data.get("active", True): + return False + + # Check this before families just because it's a faster check + if not instance.data.get("renderer") == "redshift": + return False + + families = set() + families.add(instance.data.get("family")) + families.update(instance.data.get("families", [])) + if "renderlayer" not in families: + return False + + return True diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py b/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py index e70a805de4..070974aef5 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py @@ -47,10 +47,18 @@ class ValidateShadingEngine(pyblish.api.InstancePlugin, shape, destination=True, type="shadingEngine" ) or [] for shading_engine in shading_engines: - name = ( - cmds.listConnections(shading_engine + ".surfaceShader")[0] - + "SG" + materials = cmds.listConnections( + shading_engine + ".surfaceShader", + source=True, destination=False ) + if not materials: + cls.log.warning( + "Shading engine '{}' has no material connected to its " + ".surfaceShader attribute.".format(shading_engine)) + continue + + material = materials[0] # there should only ever be one input + name = material + "SG" if shading_engine != name: invalid.append(shading_engine) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py b/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py index f1c171bddc..47314b64ac 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py @@ -3,7 +3,6 @@ import maya.cmds as cmds import pyblish.api import ayon_core.hosts.maya.api.lib as mayalib -from ayon_core.pipeline.context_tools import get_current_project_folder from ayon_core.pipeline.publish import ( RepairContextAction, ValidateSceneOrder, @@ -131,6 +130,5 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin, cls.log.debug(current_linear) cls.log.info("Setting time unit to match project") - # TODO replace query with using 'context.data["folderEntity"]' - folder_entity = get_current_project_folder() + folder_entity = context.data["folderEntity"] mayalib.set_scene_fps(folder_entity["attrib"]["fps"]) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py b/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py index 37c78a72ee..1001bee91f 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py @@ -1,3 +1,5 @@ +import inspect + from maya import cmds import pyblish.api @@ -14,8 +16,7 @@ class 
ValidateModelContent(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Adheres to the content of 'model' product type - - Must have one top group. (configurable) - - Must only contain: transforms, meshes and groups + See `get_description` for more details. """ @@ -28,13 +29,16 @@ class ValidateModelContent(pyblish.api.InstancePlugin, validate_top_group = True optional = False + allowed = ('mesh', 'transform', 'nurbsCurve', 'nurbsSurface', 'locator') + @classmethod def get_invalid(cls, instance): content_instance = instance.data.get("setMembers", None) if not content_instance: - cls.log.error("Instance has no nodes!") - return [instance.data["name"]] + cls.log.error("Model instance has no nodes. " + "It is not allowed to be empty") + return [instance.data["instance_node"]] # All children will be included in the extracted export so we also # validate *all* descendents of the set members and we skip any @@ -46,30 +50,42 @@ class ValidateModelContent(pyblish.api.InstancePlugin, content_instance = list(set(content_instance + descendants)) # Ensure only valid node types - allowed = ('mesh', 'transform', 'nurbsCurve', 'nurbsSurface', 'locator') nodes = cmds.ls(content_instance, long=True) - valid = cmds.ls(content_instance, long=True, type=allowed) + valid = cmds.ls(content_instance, long=True, type=cls.allowed) invalid = set(nodes) - set(valid) if invalid: - cls.log.error("These nodes are not allowed: %s" % invalid) + # List as bullet points + invalid_bullets = "\n".join(f"- {node}" for node in invalid) + + cls.log.error( + "These nodes are not allowed:\n{}\n\n" + "The valid node types are: {}".format( + invalid_bullets, ", ".join(cls.allowed)) + ) return list(invalid) if not valid: - cls.log.error("No valid nodes in the instance") - return True + cls.log.error( + "No valid nodes in the model instance.\n" + "The valid node types are: {}".format(", ".join(cls.allowed)) + ) + return [instance.data["instance_node"]] # Ensure it has shapes shapes = cmds.ls(valid, long=True, shapes=True) if not shapes: cls.log.error("No shapes in the model instance") - return True + return [instance.data["instance_node"]] - # Top group - top_parents = set([x.split("|")[1] for x in content_instance]) + # Ensure single top group + top_parents = {x.split("|", 2)[1] for x in content_instance} if cls.validate_top_group and len(top_parents) != 1: - cls.log.error("Must have exactly one top group") - return top_parents + cls.log.error( + "A model instance must have exactly one top group. " + "Found top groups: {}".format(", ".join(top_parents)) + ) + return list(top_parents) def _is_visible(node): """Return whether node is visible""" @@ -101,5 +117,21 @@ class ValidateModelContent(pyblish.api.InstancePlugin, if invalid: raise PublishValidationError( title="Model content is invalid", - message="See log for more details" + message="Model content is invalid. See log for more details.", + description=self.get_description() ) + + @classmethod + def get_description(cls): + return inspect.cleandoc(f""" + ### Model content is invalid + + Your model instance does not adhere to the rules of a + model product type: + + - Must have at least one visible shape in it, like a mesh. + - Must have one root node. When exporting multiple meshes they + must be inside a group. 
+ - May only contain the following node types: + {", ".join(cls.allowed)} + """) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py index 5ca9690fd7..bb6b590f5c 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py @@ -37,27 +37,27 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( - ("Found folder ids which are not related to " - "current project in instance: `{}`").format(instance.name)) + "Found folder ids which are not related to " + "current project in instance: `{}`".format(instance.name)) @classmethod def get_invalid(cls, instance): - invalid = [] + nodes = instance[:] + if not nodes: + return # Get all id required nodes id_required_nodes = lib.get_id_required_nodes(referenced_nodes=True, - nodes=instance[:]) + nodes=nodes) + if not id_required_nodes: + return [] # check ids against database ids - project_name = instance.context.data["projectName"] - folder_entities = ayon_api.get_folders(project_name, fields={"id"}) - folder_ids = { - folder_entity["id"] - for folder_entity in folder_entities - } + folder_ids = cls.get_project_folder_ids(context=instance.context) # Get all asset IDs + invalid = [] for node in id_required_nodes: cb_id = lib.get_id(node) @@ -71,3 +71,31 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin): invalid.append(node) return invalid + + @classmethod + def get_project_folder_ids(cls, context): + """Return all folder ids in the current project. + + Arguments: + context (pyblish.api.Context): The publish context. + + Returns: + set[str]: All folder ids in the current project. + + """ + # We query the database only for the first instance instead of + # per instance by storing a cache in the context + key = "__cache_project_folder_ids" + if key in context.data: + return context.data[key] + + # check ids against database + project_name = context.data["projectName"] + folder_entities = ayon_api.get_folders(project_name, fields={"id"}) + folder_ids = { + folder_entity["id"] + for folder_entity in folder_entities + } + + context.data[key] = folder_ids + return folder_ids diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_unique.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_unique.py index f4994922ce..6b44a307d2 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_unique.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_unique.py @@ -8,6 +8,8 @@ from ayon_core.pipeline.publish import ( import ayon_core.hosts.maya.api.action from ayon_core.hosts.maya.api import lib +from maya import cmds + class ValidateNodeIdsUnique(pyblish.api.InstancePlugin): """Validate the nodes in the instance have a unique Colorbleed Id @@ -41,7 +43,7 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin): if invalid: label = "Nodes found with non-unique folder ids" raise PublishValidationError( - message="{}: {}".format(label, invalid), + message="{}, see log".format(label), title="Non-unique folder ids on nodes", description="{}\n- {}".format(label, "\n- ".join(sorted(invalid))) @@ -54,7 +56,6 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin): # Check only non intermediate shapes # todo: must the instance itself ensure to have no intermediates? 
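# Sketch (illustrative, not part of the patch): get_project_folder_ids() above
# memoises an expensive server query on context.data so it runs once per
# publish rather than once per instance. The same pattern in general form
# (the helper name is made up):
def get_cached_context_value(context, key, compute):
    """Return context.data[key], computing it with compute() on first use."""
    if key not in context.data:
        context.data[key] = compute()
    return context.data[key]

# e.g. folder_ids = get_cached_context_value(
#     context, "__cache_project_folder_ids",
#     lambda: {f["id"] for f in ayon_api.get_folders(project_name,
#                                                     fields={"id"})})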
# todo: how come there are intermediates? - from maya import cmds instance_members = cmds.ls(instance, noIntermediate=True, long=True) # Collect each id with their members @@ -67,10 +68,14 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin): # Take only the ids with more than one member invalid = list() - _iteritems = getattr(ids, "iteritems", ids.items) - for _ids, members in _iteritems(): + for members in ids.values(): if len(members) > 1: - cls.log.error("ID found on multiple nodes: '%s'" % members) + members_text = "\n".join( + "- {}".format(member) for member in sorted(members) + ) + cls.log.error( + "ID found on multiple nodes:\n{}".format(members_text) + ) invalid.extend(members) return invalid diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py b/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py index 0171318813..e186d74b89 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py @@ -1,4 +1,5 @@ import re +import inspect import pyblish.api from maya import cmds @@ -36,7 +37,10 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin, return invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError("Invalid cameras for render.") + raise PublishValidationError( + "Invalid render cameras.", + description=self.get_description() + ) @classmethod def get_invalid(cls, instance): @@ -51,17 +55,30 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin, RenderSettings.get_image_prefix_attr(renderer) ) - + renderlayer = instance.data["renderlayer"] if len(cameras) > 1: if re.search(cls.R_CAMERA_TOKEN, file_prefix): # if there is token in prefix and we have more then # 1 camera, all is ok. return - cls.log.error("Multiple renderable cameras found for %s: %s " % - (instance.data["setMembers"], cameras)) - return [instance.data["setMembers"]] + cameras + cls.log.error( + "Multiple renderable cameras found for %s: %s ", + renderlayer, ", ".join(cameras)) + return [renderlayer] + cameras elif len(cameras) < 1: - cls.log.error("No renderable cameras found for %s " % - instance.data["setMembers"]) - return [instance.data["setMembers"]] + cls.log.error("No renderable cameras found for %s ", renderlayer) + return [renderlayer] + + def get_description(self): + return inspect.cleandoc( + """### Render Cameras Invalid + + Your render cameras are misconfigured. You may have no render + camera set or have multiple cameras with a render filename + prefix that does not include the `` token. + + See the logs for more details about the cameras. 
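# Sketch (illustrative, not part of the patch): the unique-id validator above
# groups instance members by their id and flags every id that is shared by
# more than one node. The same grouping in isolation:
from collections import defaultdict

def find_non_unique_members(nodes_with_ids):
    """Return nodes whose id is shared; expects (node, node_id) pairs."""
    members_by_id = defaultdict(list)
    for node, node_id in nodes_with_ids:
        members_by_id[node_id].append(node)

    invalid = []
    for members in members_by_id.values():
        if len(members) > 1:
            invalid.extend(members)
    return invalid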
+ + """ + ) diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py index 75c82164c2..ad43a24385 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py @@ -49,8 +49,9 @@ def get_selected_nodes(): """Get information from current selection""" selection = cmds.ls(selection=True, long=True) - hierarchy = lib.get_all_children(selection) - return list(set(selection + hierarchy)) + hierarchy = lib.get_all_children(selection, + ignore_intermediate_objects=True) + return list(hierarchy.union(selection)) def get_all_asset_nodes(): diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py index 74cdbeb7d4..88ef4b201a 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py @@ -51,7 +51,7 @@ def assign_vrayproxy_shaders(vrayproxy, assignments): index += 1 -def vrayproxy_assign_look(vrayproxy, product_name="lookDefault"): +def vrayproxy_assign_look(vrayproxy, product_name="lookMain"): # type: (str, str) -> None """Assign look to vray proxy. diff --git a/client/ayon_core/hosts/nuke/api/lib.py b/client/ayon_core/hosts/nuke/api/lib.py index 4fcba8d2d4..78cbe85097 100644 --- a/client/ayon_core/hosts/nuke/api/lib.py +++ b/client/ayon_core/hosts/nuke/api/lib.py @@ -2627,11 +2627,11 @@ class NukeDirmap(HostDirmap): class DirmapCache: - """Caching class to get settings and sync_module easily and only once.""" + """Caching class to get settings and sitesync easily and only once.""" _project_name = None _project_settings = None - _sync_module_discovered = False - _sync_module = None + _sitesync_addon_discovered = False + _sitesync_addon = None _mapping = None @classmethod @@ -2647,11 +2647,11 @@ class DirmapCache: return cls._project_settings @classmethod - def sync_module(cls): - if not cls._sync_module_discovered: - cls._sync_module_discovered = True - cls._sync_module = AddonsManager().get("sync_server") - return cls._sync_module + def sitesync_addon(cls): + if not cls._sitesync_addon_discovered: + cls._sitesync_addon_discovered = True + cls._sitesync_addon = AddonsManager().get("sitesync") + return cls._sitesync_addon @classmethod def mapping(cls): @@ -2673,7 +2673,7 @@ def dirmap_file_name_filter(file_name): "nuke", DirmapCache.project_name(), DirmapCache.project_settings(), - DirmapCache.sync_module(), + DirmapCache.sitesync_addon(), ) if not DirmapCache.mapping(): DirmapCache.set_mapping(dirmap_processor.get_mappings()) diff --git a/client/ayon_core/hosts/nuke/api/plugin.py b/client/ayon_core/hosts/nuke/api/plugin.py index d9cae934d7..6aa098c558 100644 --- a/client/ayon_core/hosts/nuke/api/plugin.py +++ b/client/ayon_core/hosts/nuke/api/plugin.py @@ -904,7 +904,7 @@ class ExporterReviewMov(ExporterReview): node, product_name, "Reposition node... 
`{}`" ) # append reformatted tag - add_tags.append("reformated") + add_tags.append("reformatted") # only create colorspace baking if toggled on if bake_viewer_process: diff --git a/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py b/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py index 2f6d121af5..afef3ba843 100644 --- a/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py +++ b/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook +from ayon_applications import PreLaunchHook class PrelaunchNukeAssistHook(PreLaunchHook): diff --git a/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py b/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py index 8358c11ca1..70f8fc730f 100644 --- a/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py +++ b/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py @@ -6,10 +6,7 @@ from ayon_core.lib import ( get_ayon_launcher_args, is_using_ayon_console, ) -from ayon_core.lib.applications import ( - PreLaunchHook, - LaunchTypes, -) +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.photoshop import get_launch_script_path diff --git a/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py b/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py index d82651289c..cf9953bfe9 100644 --- a/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py +++ b/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class PreLaunchResolveLastWorkfile(PreLaunchHook): diff --git a/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py b/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py index c14fd75b2f..f45e28d5ab 100644 --- a/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py +++ b/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py @@ -1,7 +1,7 @@ import os from pathlib import Path import platform -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.resolve.utils import setup diff --git a/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py b/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py index ab16053450..300564f7cc 100644 --- a/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py +++ b/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py @@ -1,6 +1,6 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes import ayon_core.hosts.resolve diff --git a/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py b/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py index f2254c0907..d940d7b05c 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py +++ b/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py @@ -18,7 +18,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): """Load mesh for project""" product_types = {"*"} - representations = ["abc", "fbx", "obj", "gltf"] + representations = ["abc", "fbx", "obj", "gltf", "usd", "usda", "usdc"] label = "Load mesh" order = -10 diff --git a/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py b/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py index 25e324c5cc..691b81e089 100644 --- 
a/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py +++ b/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py @@ -1,5 +1,5 @@ from ayon_core.lib import get_ayon_launcher_args -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class TvpaintPrelaunchHook(PreLaunchHook): diff --git a/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py b/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py index 54ffba3a63..e38591f65d 100644 --- a/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py +++ b/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py @@ -9,7 +9,7 @@ from pathlib import Path from qtpy import QtCore from ayon_core import resources -from ayon_core.lib.applications import ( +from ayon_applications import ( PreLaunchHook, ApplicationLaunchFailed, LaunchTypes, diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_animation.py b/client/ayon_core/hosts/unreal/plugins/load/load_animation.py index 0f51ac39e0..59b9f66b78 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_animation.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_animation.py @@ -8,7 +8,7 @@ from unreal import EditorAssetLibrary from unreal import MovieSceneSkeletalAnimationTrack from unreal import MovieSceneSkeletalAnimationSection -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.pipeline import ( get_representation_path, AYON_CONTAINER_ID @@ -53,7 +53,7 @@ class AnimationFBXLoader(plugin.Loader): if not actor: return None - folder_entity = get_current_project_folder(fields=["attrib.fps"]) + folder_entity = get_current_folder_entity(fields=["attrib.fps"]) task.set_editor_property('filename', path) task.set_editor_property('destination_path', asset_dir) @@ -256,7 +256,7 @@ class AnimationFBXLoader(plugin.Loader): repre_entity = context["representation"] folder_name = container["asset_name"] source_path = get_representation_path(repre_entity) - folder_entity = get_current_project_folder(fields=["attrib.fps"]) + folder_entity = get_current_folder_entity(fields=["attrib.fps"]) destination_path = container["namespace"] task = unreal.AssetImportTask() diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_layout.py b/client/ayon_core/hosts/unreal/plugins/load/load_layout.py index 6c01925453..b0f09ee8b0 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_layout.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_layout.py @@ -25,7 +25,7 @@ from ayon_core.pipeline import ( AYON_CONTAINER_ID, get_current_project_name, ) -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.settings import get_current_project_settings from ayon_core.hosts.unreal.api import plugin from ayon_core.hosts.unreal.api.pipeline import ( @@ -169,7 +169,7 @@ class LayoutLoader(plugin.Loader): anim_path = f"{asset_dir}/animations/{anim_file_name}" - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() # Import animation task = unreal.AssetImportTask() task.options = unreal.FbxImportUI() diff --git a/client/ayon_core/lib/__init__.py b/client/ayon_core/lib/__init__.py index 2ee7eecfe3..408262ca42 100644 --- a/client/ayon_core/lib/__init__.py +++ b/client/ayon_core/lib/__init__.py @@ -120,22 +120,6 @@ from .transcoding import ( 
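# Sketch (illustrative, not part of the patch): DirmapCache above keeps a
# separate "_sitesync_addon_discovered" flag so that a missing addon (None)
# is cached too and AddonsManager() is constructed at most once. The pattern
# in isolation (the class name is made up):
from ayon_core.addon import AddonsManager

class _SiteSyncAddonCache:
    _discovered = False
    _addon = None

    @classmethod
    def get(cls):
        if not cls._discovered:
            cls._discovered = True
            cls._addon = AddonsManager().get("sitesync")
        return cls._addon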
get_rescaled_command_arguments, ) -from .applications import ( - ApplicationLaunchFailed, - ApplictionExecutableNotFound, - ApplicationNotFound, - ApplicationManager, - - PreLaunchHook, - PostLaunchHook, - - EnvironmentPrepData, - prepare_app_environments, - prepare_context_environments, - get_app_environments_for_context, - apply_project_environments_value -) - from .plugin_tools import ( prepare_template_data, source_hash, @@ -231,18 +215,6 @@ __all__ = [ "convert_ffprobe_fps_to_float", "get_rescaled_command_arguments", - "ApplicationLaunchFailed", - "ApplictionExecutableNotFound", - "ApplicationNotFound", - "ApplicationManager", - "PreLaunchHook", - "PostLaunchHook", - "EnvironmentPrepData", - "prepare_app_environments", - "prepare_context_environments", - "get_app_environments_for_context", - "apply_project_environments_value", - "compile_list_of_regexes", "filter_profiles", diff --git a/client/ayon_core/lib/applications.py b/client/ayon_core/lib/applications.py deleted file mode 100644 index 2db32cbfaa..0000000000 --- a/client/ayon_core/lib/applications.py +++ /dev/null @@ -1,1893 +0,0 @@ -import os -import sys -import copy -import json -import tempfile -import platform -import collections -import inspect -import subprocess -from abc import ABCMeta, abstractmethod - -import six - -from ayon_core import AYON_CORE_ROOT -from ayon_core.settings import get_project_settings, get_studio_settings -from .log import Logger -from .profiles_filtering import filter_profiles -from .local_settings import get_ayon_username - -from .python_module_tools import ( - modules_from_path, - classes_from_module -) -from .execute import ( - find_executable, - get_linux_launcher_args -) - -_logger = None - -PLATFORM_NAMES = {"windows", "linux", "darwin"} -DEFAULT_ENV_SUBGROUP = "standard" -CUSTOM_LAUNCH_APP_GROUPS = { - "djvview" -} - - -class LaunchTypes: - """Launch types are filters for pre/post-launch hooks. - - Please use these variables in case they'll change values. - """ - - # Local launch - application is launched on local machine - local = "local" - # Farm render job - application is on farm - farm_render = "farm-render" - # Farm publish job - integration post-render job - farm_publish = "farm-publish" - # Remote launch - application is launched on remote machine from which - # can be started publishing - remote = "remote" - # Automated launch - application is launched with automated publishing - automated = "automated" - - -def parse_environments(env_data, env_group=None, platform_name=None): - """Parse environment values from settings byt group and platform. - - Data may contain up to 2 hierarchical levels of dictionaries. At the end - of the last level must be string or list. List is joined using platform - specific joiner (';' for windows and ':' for linux and mac). - - Hierarchical levels can contain keys for subgroups and platform name. - Platform specific values must be always last level of dictionary. Platform - names are "windows" (MS Windows), "linux" (any linux distribution) and - "darwin" (any MacOS distribution). - - Subgroups are helpers added mainly for standard and on farm usage. Farm - may require different environments for e.g. licence related values or - plugins. Default subgroup is "standard". 
- - Examples: - ``` - { - # Unchanged value - "ENV_KEY1": "value", - # Empty values are kept (unset environment variable) - "ENV_KEY2": "", - - # Join list values with ':' or ';' - "ENV_KEY3": ["value1", "value2"], - - # Environment groups - "ENV_KEY4": { - "standard": "DEMO_SERVER_URL", - "farm": "LICENCE_SERVER_URL" - }, - - # Platform specific (and only for windows and mac) - "ENV_KEY5": { - "windows": "windows value", - "darwin": ["value 1", "value 2"] - }, - - # Environment groups and platform combination - "ENV_KEY6": { - "farm": "FARM_VALUE", - "standard": { - "windows": ["value1", "value2"], - "linux": "value1", - "darwin": "" - } - } - } - ``` - """ - output = {} - if not env_data: - return output - - if not env_group: - env_group = DEFAULT_ENV_SUBGROUP - - if not platform_name: - platform_name = platform.system().lower() - - for key, value in env_data.items(): - if isinstance(value, dict): - # Look if any key is platform key - # - expect that represents environment group if does not contain - # platform keys - if not PLATFORM_NAMES.intersection(set(value.keys())): - # Skip the key if group is not available - if env_group not in value: - continue - value = value[env_group] - - # Check again if value is dictionary - # - this time there should be only platform keys - if isinstance(value, dict): - value = value.get(platform_name) - - # Check if value is list and join it's values - # QUESTION Should empty values be skipped? - if isinstance(value, (list, tuple)): - value = os.pathsep.join(value) - - # Set key to output if value is string - if isinstance(value, six.string_types): - output[key] = value - return output - - -def get_logger(): - """Global lib.applications logger getter.""" - global _logger - if _logger is None: - _logger = Logger.get_logger(__name__) - return _logger - - -class ApplicationNotFound(Exception): - """Application was not found in ApplicationManager by name.""" - - def __init__(self, app_name): - self.app_name = app_name - super(ApplicationNotFound, self).__init__( - "Application \"{}\" was not found.".format(app_name) - ) - - -class ApplictionExecutableNotFound(Exception): - """Defined executable paths are not available on the machine.""" - - def __init__(self, application): - self.application = application - details = None - if not application.executables: - msg = ( - "Executable paths for application \"{}\"({}) are not set." - ) - else: - msg = ( - "Defined executable paths for application \"{}\"({})" - " are not available on this machine." - ) - details = "Defined paths:" - for executable in application.executables: - details += "\n- " + executable.executable_path - - self.msg = msg.format(application.full_label, application.full_name) - self.details = details - - exc_mgs = str(self.msg) - if details: - # Is good idea to pass new line symbol to exception message? - exc_mgs += "\n" + details - self.exc_msg = exc_mgs - super(ApplictionExecutableNotFound, self).__init__(exc_mgs) - - -class ApplicationLaunchFailed(Exception): - """Application launch failed due to known reason. - - Message should be self explanatory as traceback won't be shown. - """ - pass - - -class ApplicationGroup: - """Hold information about application group. - - Application group wraps different versions(variants) of application. - e.g. "maya" is group and "maya_2020" is variant. - - Group hold `host_name` which is implementation name used in AYON. Also - holds `enabled` if whole app group is enabled or `icon` for application - icon path in resources. 
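# Worked example (illustrative, not part of the patch): given the example data
# in the parse_environments() docstring above, the removed function would
# resolve the following with env_group="standard" on Windows
# (os.pathsep == ";"):
#
#   {
#       "ENV_KEY1": "value",
#       "ENV_KEY2": "",
#       "ENV_KEY3": "value1;value2",
#       "ENV_KEY4": "DEMO_SERVER_URL",
#       "ENV_KEY5": "windows value",
#       "ENV_KEY6": "value1;value2",
#   }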
- - Group has also `environment` which hold same environments for all variants. - - Args: - name (str): Groups' name. - data (dict): Group defying data loaded from settings. - manager (ApplicationManager): Manager that created the group. - """ - - def __init__(self, name, data, manager): - self.name = name - self.manager = manager - self._data = data - - self.enabled = data["enabled"] - self.label = data["label"] or None - self.icon = data["icon"] or None - env = {} - try: - env = json.loads(data["environment"]) - except Exception: - pass - self._environment = env - - host_name = data["host_name"] or None - self.is_host = host_name is not None - self.host_name = host_name - - settings_variants = data["variants"] - variants = {} - for variant_data in settings_variants: - app_variant = Application(variant_data, self) - variants[app_variant.name] = app_variant - - self.variants = variants - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.name) - - def __iter__(self): - for variant in self.variants.values(): - yield variant - - @property - def environment(self): - return copy.deepcopy(self._environment) - - -class Application: - """Hold information about application. - - Object by itself does nothing special. - - Args: - data (dict): Data for the version containing information about - executables, variant label or if is enabled. - Only required key is `executables`. - group (ApplicationGroup): App group object that created the application - and under which application belongs. - - """ - def __init__(self, data, group): - self._data = data - name = data["name"] - label = data["label"] or name - enabled = False - if group.enabled: - enabled = data.get("enabled", True) - - if group.label: - full_label = " ".join((group.label, label)) - else: - full_label = label - env = {} - try: - env = json.loads(data["environment"]) - except Exception: - pass - - arguments = data["arguments"] - if isinstance(arguments, dict): - arguments = arguments.get(platform.system().lower()) - - if not arguments: - arguments = [] - - _executables = data["executables"].get(platform.system().lower(), []) - executables = [ - ApplicationExecutable(executable) - for executable in _executables - ] - - self.group = group - - self.name = name - self.label = label - self.enabled = enabled - self.use_python_2 = data.get("use_python_2", False) - - self.full_name = "/".join((group.name, name)) - self.full_label = full_label - self.arguments = arguments - self.executables = executables - self._environment = env - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.full_name) - - @property - def environment(self): - return copy.deepcopy(self._environment) - - @property - def manager(self): - return self.group.manager - - @property - def host_name(self): - return self.group.host_name - - @property - def icon(self): - return self.group.icon - - @property - def is_host(self): - return self.group.is_host - - def find_executable(self): - """Try to find existing executable for application. - - Returns (str): Path to executable from `executables` or None if any - exists. - """ - for executable in self.executables: - if executable.exists(): - return executable - return None - - def launch(self, *args, **kwargs): - """Launch the application. - - For this purpose is used manager's launch method to keep logic at one - place. - - Arguments must match with manager's launch method. That's why *args - **kwargs are used. 
- - Returns: - subprocess.Popen: Return executed process as Popen object. - """ - return self.manager.launch(self.full_name, *args, **kwargs) - - -class ApplicationManager: - """Load applications and tools and store them by their full name. - - Args: - studio_settings (dict): Preloaded studio settings. When passed manager - will always use these values. Gives ability to create manager - using different settings. - """ - - def __init__(self, studio_settings=None): - self.log = Logger.get_logger(self.__class__.__name__) - - self.app_groups = {} - self.applications = {} - self.tool_groups = {} - self.tools = {} - - self._studio_settings = studio_settings - - self.refresh() - - def set_studio_settings(self, studio_settings): - """Ability to change init system settings. - - This will trigger refresh of manager. - """ - self._studio_settings = studio_settings - - self.refresh() - - def refresh(self): - """Refresh applications from settings.""" - self.app_groups.clear() - self.applications.clear() - self.tool_groups.clear() - self.tools.clear() - - if self._studio_settings is not None: - settings = copy.deepcopy(self._studio_settings) - else: - settings = get_studio_settings( - clear_metadata=False, exclude_locals=False - ) - - applications_addon_settings = settings["applications"] - - # Prepare known applications - app_defs = applications_addon_settings["applications"] - additional_apps = app_defs.pop("additional_apps") - for additional_app in additional_apps: - app_name = additional_app.pop("name") - if app_name in app_defs: - self.log.warning(( - "Additional application '{}' is already" - " in built-in applications." - ).format(app_name)) - app_defs[app_name] = additional_app - - for group_name, variant_defs in app_defs.items(): - group = ApplicationGroup(group_name, variant_defs, self) - self.app_groups[group_name] = group - for app in group: - self.applications[app.full_name] = app - - tools_definitions = applications_addon_settings["tool_groups"] - for tool_group_data in tools_definitions: - group = EnvironmentToolGroup(tool_group_data, self) - self.tool_groups[group.name] = group - for tool in group: - self.tools[tool.full_name] = tool - - def find_latest_available_variant_for_group(self, group_name): - group = self.app_groups.get(group_name) - if group is None or not group.enabled: - return None - - output = None - for _, variant in reversed(sorted(group.variants.items())): - executable = variant.find_executable() - if executable: - output = variant - break - return output - - def create_launch_context(self, app_name, **data): - """Prepare launch context for application. - - Args: - app_name (str): Name of application that should be launched. - **data (Any): Any additional data. Data may be used during - - Returns: - ApplicationLaunchContext: Launch context for application. - - Raises: - ApplicationNotFound: Application was not found by entered name. - """ - - app = self.applications.get(app_name) - if not app: - raise ApplicationNotFound(app_name) - - executable = app.find_executable() - - return ApplicationLaunchContext( - app, executable, **data - ) - - def launch_with_context(self, launch_context): - """Launch application using existing launch context. - - Args: - launch_context (ApplicationLaunchContext): Prepared launch - context. - """ - - if not launch_context.executable: - raise ApplictionExecutableNotFound(launch_context.application) - return launch_context.launch() - - def launch(self, app_name, **data): - """Launch procedure. 
- - For host application it's expected to contain "project_name", - "folder_path" and "task_name". - - Args: - app_name (str): Name of application that should be launched. - **data (dict): Any additional data. Data may be used during - preparation to store objects usable in multiple places. - - Raises: - ApplicationNotFound: Application was not found by entered - argument `app_name`. - ApplictionExecutableNotFound: Executables in application definition - were not found on this machine. - ApplicationLaunchFailed: Something important for application launch - failed. Exception should contain explanation message, - traceback should not be needed. - """ - - context = self.create_launch_context(app_name, **data) - return self.launch_with_context(context) - - - -class EnvironmentToolGroup: - """Hold information about environment tool group. - - Environment tool group may hold different variants of same tool and set - environments that are same for all of them. - - e.g. "mtoa" may have different versions but all environments except one - are same. - - Args: - data (dict): Group information with variants. - manager (ApplicationManager): Manager that creates the group. - """ - - def __init__(self, data, manager): - name = data["name"] - label = data["label"] - - self.name = name - self.label = label - self._data = data - self.manager = manager - - environment = {} - try: - environment = json.loads(data["environment"]) - except Exception: - pass - self._environment = environment - - variants = data.get("variants") or [] - variants_by_name = {} - for variant_data in variants: - tool = EnvironmentTool(variant_data, self) - variants_by_name[tool.name] = tool - self.variants = variants_by_name - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.name) - - def __iter__(self): - for variant in self.variants.values(): - yield variant - - @property - def environment(self): - return copy.deepcopy(self._environment) - - -class EnvironmentTool: - """Hold information about application tool. - - Structure of tool information. - - Args: - variant_data (dict): Variant data with environments and - host and app variant filters. - group (EnvironmentToolGroup): Name of group which wraps tool. - """ - - def __init__(self, variant_data, group): - # Backwards compatibility 3.9.1 - 3.9.2 - # - 'variant_data' contained only environments but contain also host - # and application variant filters - name = variant_data["name"] - label = variant_data["label"] - host_names = variant_data["host_names"] - app_variants = variant_data["app_variants"] - - environment = {} - try: - environment = json.loads(variant_data["environment"]) - except Exception: - pass - - self.host_names = host_names - self.app_variants = app_variants - self.name = name - self.variant_label = label - self.label = " ".join((group.label, label)) - self.group = group - - self._environment = environment - self.full_name = "/".join((group.name, name)) - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.full_name) - - @property - def environment(self): - return copy.deepcopy(self._environment) - - def is_valid_for_app(self, app): - """Is tool valid for application. - - Args: - app (Application): Application for which are prepared environments. 
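Editor's note: a minimal sketch of driving the removed `ApplicationManager` API described above. The application full name and the context values are illustrative.

```
manager = ApplicationManager()

# Applications are stored by full name: "<group>/<variant>", e.g. "maya/2024".
app = manager.applications.get("maya/2024")
if app is not None and app.enabled and app.find_executable():
    # For host applications the launch data is expected to contain these keys.
    manager.launch(
        "maya/2024",
        project_name="MyProject",
        folder_path="/assets/hero",
        task_name="modeling",
    )
```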
- """ - if self.app_variants and app.full_name not in self.app_variants: - return False - - if self.host_names and app.host_name not in self.host_names: - return False - return True - - -class ApplicationExecutable: - """Representation of executable loaded from settings.""" - - def __init__(self, executable): - # Try to format executable with environments - try: - executable = executable.format(**os.environ) - except Exception: - pass - - # On MacOS check if exists path to executable when ends with `.app` - # - it is common that path will lead to "/Applications/Blender" but - # real path is "/Applications/Blender.app" - if platform.system().lower() == "darwin": - executable = self.macos_executable_prep(executable) - - self.executable_path = executable - - def __str__(self): - return self.executable_path - - def __repr__(self): - return "<{}> {}".format(self.__class__.__name__, self.executable_path) - - @staticmethod - def macos_executable_prep(executable): - """Try to find full path to executable file. - - Real executable is stored in '*.app/Contents/MacOS/'. - - Having path to '*.app' gives ability to read it's plist info and - use "CFBundleExecutable" key from plist to know what is "executable." - - Plist is stored in '*.app/Contents/Info.plist'. - - This is because some '*.app' directories don't have same permissions - as real executable. - """ - # Try to find if there is `.app` file - if not os.path.exists(executable): - _executable = executable + ".app" - if os.path.exists(_executable): - executable = _executable - - # Try to find real executable if executable has `Contents` subfolder - contents_dir = os.path.join(executable, "Contents") - if os.path.exists(contents_dir): - executable_filename = None - # Load plist file and check for bundle executable - plist_filepath = os.path.join(contents_dir, "Info.plist") - if os.path.exists(plist_filepath): - import plistlib - - if hasattr(plistlib, "load"): - with open(plist_filepath, "rb") as stream: - parsed_plist = plistlib.load(stream) - else: - parsed_plist = plistlib.readPlist(plist_filepath) - executable_filename = parsed_plist.get("CFBundleExecutable") - - if executable_filename: - executable = os.path.join( - contents_dir, "MacOS", executable_filename - ) - - return executable - - def as_args(self): - return [self.executable_path] - - def _realpath(self): - """Check if path is valid executable path.""" - # Check for executable in PATH - result = find_executable(self.executable_path) - if result is not None: - return result - - # This is not 100% validation but it is better than remove ability to - # launch .bat, .sh or extentionless files - if os.path.exists(self.executable_path): - return self.executable_path - return None - - def exists(self): - if not self.executable_path: - return False - return bool(self._realpath()) - - -class UndefinedApplicationExecutable(ApplicationExecutable): - """Some applications do not require executable path from settings. - - In that case this class is used to "fake" existing executable. - """ - def __init__(self): - pass - - def __str__(self): - return self.__class__.__name__ - - def __repr__(self): - return "<{}>".format(self.__class__.__name__) - - def as_args(self): - return [] - - def exists(self): - return True - - -@six.add_metaclass(ABCMeta) -class LaunchHook: - """Abstract base class of launch hook.""" - # Order of prelaunch hook, will be executed as last if set to None. - order = None - # List of host implementations, skipped if empty. 
- hosts = set() - # Set of application groups - app_groups = set() - # Set of specific application names - app_names = set() - # Set of platform availability - platforms = set() - # Set of launch types for which is available - # - if empty then is available for all launch types - # - by default has 'local' which is most common reason for launc hooks - launch_types = {LaunchTypes.local} - - def __init__(self, launch_context): - """Constructor of launch hook. - - Always should be called - """ - self.log = Logger.get_logger(self.__class__.__name__) - - self.launch_context = launch_context - - is_valid = self.class_validation(launch_context) - if is_valid: - is_valid = self.validate() - - self.is_valid = is_valid - - @classmethod - def class_validation(cls, launch_context): - """Validation of class attributes by launch context. - - Args: - launch_context (ApplicationLaunchContext): Context of launching - application. - - Returns: - bool: Is launch hook valid for the context by class attributes. - """ - if cls.platforms: - low_platforms = tuple( - _platform.lower() - for _platform in cls.platforms - ) - if platform.system().lower() not in low_platforms: - return False - - if cls.hosts: - if launch_context.host_name not in cls.hosts: - return False - - if cls.app_groups: - if launch_context.app_group.name not in cls.app_groups: - return False - - if cls.app_names: - if launch_context.app_name not in cls.app_names: - return False - - if cls.launch_types: - if launch_context.launch_type not in cls.launch_types: - return False - - return True - - @property - def data(self): - return self.launch_context.data - - @property - def application(self): - return getattr(self.launch_context, "application", None) - - @property - def manager(self): - return getattr(self.application, "manager", None) - - @property - def host_name(self): - return getattr(self.application, "host_name", None) - - @property - def app_group(self): - return getattr(self.application, "group", None) - - @property - def app_name(self): - return getattr(self.application, "full_name", None) - - @property - def addons_manager(self): - return getattr(self.launch_context, "addons_manager", None) - - @property - def modules_manager(self): - """ - Deprecated: - Use 'addons_wrapper' instead. - """ - return self.addons_manager - - def validate(self): - """Optional validation of launch hook on initialization. - - Returns: - bool: Hook is valid (True) or invalid (False). - """ - # QUESTION Not sure if this method has any usable potential. - # - maybe result can be based on settings - return True - - @abstractmethod - def execute(self, *args, **kwargs): - """Abstract execute method where logic of hook is.""" - pass - - -class PreLaunchHook(LaunchHook): - """Abstract class of prelaunch hook. - - This launch hook will be processed before application is launched. - - If any exception will happen during processing the application won't be - launched. - """ - - -class PostLaunchHook(LaunchHook): - """Abstract class of postlaunch hook. - - This launch hook will be processed after application is launched. - - Nothing will happen if any exception will happen during processing. And - processing of other postlaunch hooks won't stop either. - """ - - -class ApplicationLaunchContext: - """Context of launching application. - - Main purpose of context is to prepare launch arguments and keyword - arguments for new process. Most important part of keyword arguments - preparations are environment variables. 
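Editor's note: the class attributes on `LaunchHook` above act as filters that decide whether a hook applies to a launch context. A minimal custom pre-launch hook sketch; the filter values and the environment key are illustrative.

```
class AddStudioFlag(PreLaunchHook):
    # Run only for local launches of the "maya" group on Windows (illustrative).
    app_groups = {"maya"}
    platforms = {"windows"}
    launch_types = {LaunchTypes.local}

    def execute(self):
        # Pre-launch hooks typically mutate the prepared environment
        # or launch arguments on the launch context.
        self.launch_context.env["MY_STUDIO_FLAG"] = "1"
```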
- - During the whole process is possible to use `data` attribute to store - object usable in multiple places. - - Launch arguments are strings in list. It is possible to "chain" argument - when order of them matters. That is possible to do with adding list where - order is right and should not change. - NOTE: This is recommendation, not requirement. - e.g.: `["nuke.exe", "--NukeX"]` -> In this case any part of process may - insert argument between `nuke.exe` and `--NukeX`. To keep them together - it is better to wrap them in another list: `[["nuke.exe", "--NukeX"]]`. - - Notes: - It is possible to use launch context only to prepare environment - variables. In that case `executable` may be None and can be used - 'run_prelaunch_hooks' method to run prelaunch hooks which prepare - them. - - Args: - application (Application): Application definition. - executable (ApplicationExecutable): Object with path to executable. - env_group (Optional[str]): Environment variable group. If not set - 'DEFAULT_ENV_SUBGROUP' is used. - launch_type (Optional[str]): Launch type. If not set 'local' is used. - **data (dict): Any additional data. Data may be used during - preparation to store objects usable in multiple places. - """ - - def __init__( - self, - application, - executable, - env_group=None, - launch_type=None, - **data - ): - from ayon_core.addon import AddonsManager - - # Application object - self.application = application - - self.addons_manager = AddonsManager() - - # Logger - logger_name = "{}-{}".format(self.__class__.__name__, - self.application.full_name) - self.log = Logger.get_logger(logger_name) - - self.executable = executable - - if launch_type is None: - launch_type = LaunchTypes.local - self.launch_type = launch_type - - if env_group is None: - env_group = DEFAULT_ENV_SUBGROUP - - self.env_group = env_group - - self.data = dict(data) - - launch_args = [] - if executable is not None: - launch_args = executable.as_args() - # subprocess.Popen launch arguments (first argument in constructor) - self.launch_args = launch_args - self.launch_args.extend(application.arguments) - if self.data.get("app_args"): - self.launch_args.extend(self.data.pop("app_args")) - - # Handle launch environemtns - src_env = self.data.pop("env", None) - if src_env is not None and not isinstance(src_env, dict): - self.log.warning(( - "Passed `env` kwarg has invalid type: {}. Expected: `dict`." - " Using `os.environ` instead." - ).format(str(type(src_env)))) - src_env = None - - if src_env is None: - src_env = os.environ - - ignored_env = {"QT_API", } - env = { - key: str(value) - for key, value in src_env.items() - if key not in ignored_env - } - # subprocess.Popen keyword arguments - self.kwargs = {"env": env} - - if platform.system().lower() == "windows": - # Detach new process from currently running process on Windows - flags = ( - subprocess.CREATE_NEW_PROCESS_GROUP - | subprocess.DETACHED_PROCESS - ) - self.kwargs["creationflags"] = flags - - if not sys.stdout: - self.kwargs["stdout"] = subprocess.DEVNULL - self.kwargs["stderr"] = subprocess.DEVNULL - - self.prelaunch_hooks = None - self.postlaunch_hooks = None - - self.process = None - self._prelaunch_hooks_executed = False - - @property - def env(self): - if ( - "env" not in self.kwargs - or self.kwargs["env"] is None - ): - self.kwargs["env"] = {} - return self.kwargs["env"] - - @env.setter - def env(self, value): - if not isinstance(value, dict): - raise ValueError( - "'env' attribute expect 'dict' object. 
Got: {}".format( - str(type(value)) - ) - ) - self.kwargs["env"] = value - - @property - def modules_manager(self): - """ - Deprecated: - Use 'addons_manager' instead. - - """ - return self.addons_manager - - def _collect_addons_launch_hook_paths(self): - """Helper to collect application launch hooks from addons. - - Module have to have implemented 'get_launch_hook_paths' method which - can expect application as argument or nothing. - - Returns: - List[str]: Paths to launch hook directories. - """ - - expected_types = (list, tuple, set) - - output = [] - for module in self.addons_manager.get_enabled_addons(): - # Skip module if does not have implemented 'get_launch_hook_paths' - func = getattr(module, "get_launch_hook_paths", None) - if func is None: - continue - - func = module.get_launch_hook_paths - if hasattr(inspect, "signature"): - sig = inspect.signature(func) - expect_args = len(sig.parameters) > 0 - else: - expect_args = len(inspect.getargspec(func)[0]) > 0 - - # Pass application argument if method expect it. - try: - if expect_args: - hook_paths = func(self.application) - else: - hook_paths = func() - except Exception: - self.log.warning( - "Failed to call 'get_launch_hook_paths'", - exc_info=True - ) - continue - - if not hook_paths: - continue - - # Convert string to list - if isinstance(hook_paths, six.string_types): - hook_paths = [hook_paths] - - # Skip invalid types - if not isinstance(hook_paths, expected_types): - self.log.warning(( - "Result of `get_launch_hook_paths`" - " has invalid type {}. Expected {}" - ).format(type(hook_paths), expected_types)) - continue - - output.extend(hook_paths) - return output - - def paths_to_launch_hooks(self): - """Directory paths where to look for launch hooks.""" - # This method has potential to be part of application manager (maybe). 
- paths = [] - - # TODO load additional studio paths from settings - global_hooks_dir = os.path.join(AYON_CORE_ROOT, "hooks") - - hooks_dirs = [ - global_hooks_dir - ] - if self.host_name: - # If host requires launch hooks and is module then launch hooks - # should be collected using 'collect_launch_hook_paths' - # - module have to implement 'get_launch_hook_paths' - host_module = self.addons_manager.get_host_addon(self.host_name) - if not host_module: - hooks_dirs.append(os.path.join( - AYON_CORE_ROOT, "hosts", self.host_name, "hooks" - )) - - for path in hooks_dirs: - if ( - os.path.exists(path) - and os.path.isdir(path) - and path not in paths - ): - paths.append(path) - - # Load modules paths - paths.extend(self._collect_addons_launch_hook_paths()) - - return paths - - def discover_launch_hooks(self, force=False): - """Load and prepare launch hooks.""" - if ( - self.prelaunch_hooks is not None - or self.postlaunch_hooks is not None - ): - if not force: - self.log.info("Launch hooks were already discovered.") - return - - self.prelaunch_hooks.clear() - self.postlaunch_hooks.clear() - - self.log.debug("Discovery of launch hooks started.") - - paths = self.paths_to_launch_hooks() - self.log.debug("Paths searched for launch hooks:\n{}".format( - "\n".join("- {}".format(path) for path in paths) - )) - - all_classes = { - "pre": [], - "post": [] - } - for path in paths: - if not os.path.exists(path): - self.log.info( - "Path to launch hooks does not exist: \"{}\"".format(path) - ) - continue - - modules, _crashed = modules_from_path(path) - for _filepath, module in modules: - all_classes["pre"].extend( - classes_from_module(PreLaunchHook, module) - ) - all_classes["post"].extend( - classes_from_module(PostLaunchHook, module) - ) - - for launch_type, classes in all_classes.items(): - hooks_with_order = [] - hooks_without_order = [] - for klass in classes: - try: - hook = klass(self) - if not hook.is_valid: - self.log.debug( - "Skipped hook invalid for current launch context: " - "{}".format(klass.__name__) - ) - continue - - if inspect.isabstract(hook): - self.log.debug("Skipped abstract hook: {}".format( - klass.__name__ - )) - continue - - # Separate hooks by pre/post class - if hook.order is None: - hooks_without_order.append(hook) - else: - hooks_with_order.append(hook) - - except Exception: - self.log.warning( - "Initialization of hook failed: " - "{}".format(klass.__name__), - exc_info=True - ) - - # Sort hooks with order by order - ordered_hooks = list(sorted( - hooks_with_order, key=lambda obj: obj.order - )) - # Extend ordered hooks with hooks without defined order - ordered_hooks.extend(hooks_without_order) - - if launch_type == "pre": - self.prelaunch_hooks = ordered_hooks - else: - self.postlaunch_hooks = ordered_hooks - - self.log.debug("Found {} prelaunch and {} postlaunch hooks.".format( - len(self.prelaunch_hooks), len(self.postlaunch_hooks) - )) - - @property - def app_name(self): - return self.application.name - - @property - def host_name(self): - return self.application.host_name - - @property - def app_group(self): - return self.application.group - - @property - def manager(self): - return self.application.manager - - def _run_process(self): - # Windows and MacOS have easier process start - low_platform = platform.system().lower() - if low_platform in ("windows", "darwin"): - return subprocess.Popen(self.launch_args, **self.kwargs) - - # Linux uses mid process - # - it is possible that the mid process executable is not - # available for this version of AYON in that case 
use standard - # launch - launch_args = get_linux_launcher_args() - if launch_args is None: - return subprocess.Popen(self.launch_args, **self.kwargs) - - # Prepare data that will be passed to midprocess - # - store arguments to a json and pass path to json as last argument - # - pass environments to set - app_env = self.kwargs.pop("env", {}) - json_data = { - "args": self.launch_args, - "env": app_env - } - if app_env: - # Filter environments of subprocess - self.kwargs["env"] = { - key: value - for key, value in os.environ.items() - if key in app_env - } - - # Create temp file - json_temp = tempfile.NamedTemporaryFile( - mode="w", prefix="op_app_args", suffix=".json", delete=False - ) - json_temp.close() - json_temp_filpath = json_temp.name - with open(json_temp_filpath, "w") as stream: - json.dump(json_data, stream) - - launch_args.append(json_temp_filpath) - - # Create mid-process which will launch application - process = subprocess.Popen(launch_args, **self.kwargs) - # Wait until the process finishes - # - This is important! The process would stay in "open" state. - process.wait() - # Remove the temp file - os.remove(json_temp_filpath) - # Return process which is already terminated - return process - - def run_prelaunch_hooks(self): - """Run prelaunch hooks. - - This method will be executed only once, any future calls will skip - the processing. - """ - - if self._prelaunch_hooks_executed: - self.log.warning("Prelaunch hooks were already executed.") - return - # Discover launch hooks - self.discover_launch_hooks() - - # Execute prelaunch hooks - for prelaunch_hook in self.prelaunch_hooks: - self.log.debug("Executing prelaunch hook: {}".format( - str(prelaunch_hook.__class__.__name__) - )) - prelaunch_hook.execute() - self._prelaunch_hooks_executed = True - - def launch(self): - """Collect data for new process and then create it. - - This method must not be executed more than once. - - Returns: - subprocess.Popen: Created process as Popen object. - """ - if self.process is not None: - self.log.warning("Application was already launched.") - return - - if not self._prelaunch_hooks_executed: - self.run_prelaunch_hooks() - - self.log.debug("All prelaunch hook executed. Starting new process.") - - # Prepare subprocess args - args_len_str = "" - if isinstance(self.launch_args, str): - args = self.launch_args - else: - args = self.clear_launch_args(self.launch_args) - args_len_str = " ({})".format(len(args)) - self.log.info( - "Launching \"{}\" with args{}: {}".format( - self.application.full_name, args_len_str, args - ) - ) - self.launch_args = args - - # Run process - self.process = self._run_process() - - # Process post launch hooks - for postlaunch_hook in self.postlaunch_hooks: - self.log.debug("Executing postlaunch hook: {}".format( - str(postlaunch_hook.__class__.__name__) - )) - - # TODO how to handle errors? - # - store to variable to let them accessible? - try: - postlaunch_hook.execute() - - except Exception: - self.log.warning( - "After launch procedures were not successful.", - exc_info=True - ) - - self.log.debug("Launch of {} finished.".format( - self.application.full_name - )) - - return self.process - - @staticmethod - def clear_launch_args(args): - """Collect launch arguments to final order. - - Launch argument should be list that may contain another lists this - function will upack inner lists and keep ordering. 
- - ``` - # source - [ [ arg1, [ arg2, arg3 ] ], arg4, [arg5, arg6]] - # result - [ arg1, arg2, arg3, arg4, arg5, arg6] - - Args: - args (list): Source arguments in list may contain inner lists. - - Return: - list: Unpacked arguments. - """ - if isinstance(args, str): - return args - all_cleared = False - while not all_cleared: - all_cleared = True - new_args = [] - for arg in args: - if isinstance(arg, (list, tuple, set)): - all_cleared = False - for _arg in arg: - new_args.append(_arg) - else: - new_args.append(arg) - args = new_args - - return args - - -class MissingRequiredKey(KeyError): - pass - - -class EnvironmentPrepData(dict): - """Helper dictionary for storin temp data during environment prep. - - Args: - data (dict): Data must contain required keys. - """ - required_keys = ( - "project_entity", "folder_entity", "task_entity", "app", "anatomy" - ) - - def __init__(self, data): - for key in self.required_keys: - if key not in data: - raise MissingRequiredKey(key) - - if not data.get("log"): - data["log"] = get_logger() - - if data.get("env") is None: - data["env"] = os.environ.copy() - - project_name = data["project_entity"]["name"] - if "project_settings" not in data: - data["project_settings"] = get_project_settings(project_name) - - super(EnvironmentPrepData, self).__init__(data) - - -def get_app_environments_for_context( - project_name, - folder_path, - task_name, - app_name, - env_group=None, - launch_type=None, - env=None, - addons_manager=None -): - """Prepare environment variables by context. - Args: - project_name (str): Name of project. - folder_path (str): Folder path. - task_name (str): Name of task. - app_name (str): Name of application that is launched and can be found - by ApplicationManager. - env_group (Optional[str]): Name of environment group. If not passed - default group is used. - launch_type (Optional[str]): Type for which prelaunch hooks are - executed. - env (Optional[dict[str, str]]): Initial environment variables. - `os.environ` is used when not passed. - addons_manager (Optional[AddonsManager]): Initialized modules - manager. - - Returns: - dict: Environments for passed context and application. 
- """ - - # Prepare app object which can be obtained only from ApplicationManager - app_manager = ApplicationManager() - context = app_manager.create_launch_context( - app_name, - project_name=project_name, - folder_path=folder_path, - task_name=task_name, - env_group=env_group, - launch_type=launch_type, - env=env, - addons_manager=addons_manager, - modules_manager=addons_manager, - ) - context.run_prelaunch_hooks() - return context.env - - -def _merge_env(env, current_env): - """Modified function(merge) from acre module.""" - import acre - - result = current_env.copy() - for key, value in env.items(): - # Keep missing keys by not filling `missing` kwarg - value = acre.lib.partial_format(value, data=current_env) - result[key] = value - return result - - -def _add_python_version_paths(app, env, logger, addons_manager): - """Add vendor packages specific for a Python version.""" - - for addon in addons_manager.get_enabled_addons(): - addon.modify_application_launch_arguments(app, env) - - # Skip adding if host name is not set - if not app.host_name: - return - - # Add Python 2/3 modules - python_vendor_dir = os.path.join( - AYON_CORE_ROOT, - "vendor", - "python" - ) - if app.use_python_2: - pythonpath = os.path.join(python_vendor_dir, "python_2") - else: - pythonpath = os.path.join(python_vendor_dir, "python_3") - - if not os.path.exists(pythonpath): - return - - logger.debug("Adding Python version specific paths to PYTHONPATH") - python_paths = [pythonpath] - - # Load PYTHONPATH from current launch context - python_path = env.get("PYTHONPATH") - if python_path: - python_paths.append(python_path) - - # Set new PYTHONPATH to launch context environments - env["PYTHONPATH"] = os.pathsep.join(python_paths) - - -def prepare_app_environments( - data, env_group=None, implementation_envs=True, addons_manager=None -): - """Modify launch environments based on launched app and context. - - Args: - data (EnvironmentPrepData): Dictionary where result and intermediate - result will be stored. - """ - import acre - - app = data["app"] - log = data["log"] - source_env = data["env"].copy() - - if addons_manager is None: - from ayon_core.addon import AddonsManager - - addons_manager = AddonsManager() - - _add_python_version_paths(app, source_env, log, addons_manager) - - # Use environments from local settings - filtered_local_envs = {} - # NOTE Overrides for environment variables are not implemented in AYON. 
- # project_settings = data["project_settings"] - # whitelist_envs = project_settings["general"].get("local_env_white_list") - # if whitelist_envs: - # local_settings = get_local_settings() - # local_envs = local_settings.get("environments") or {} - # filtered_local_envs = { - # key: value - # for key, value in local_envs.items() - # if key in whitelist_envs - # } - - # Apply local environment variables for already existing values - for key, value in filtered_local_envs.items(): - if key in source_env: - source_env[key] = value - - # `app_and_tool_labels` has debug purpose - app_and_tool_labels = [app.full_name] - # Environments for application - environments = [ - app.group.environment, - app.environment - ] - - folder_entity = data.get("folder_entity") - # Add tools environments - groups_by_name = {} - tool_by_group_name = collections.defaultdict(dict) - if folder_entity: - # Make sure each tool group can be added only once - for key in folder_entity["attrib"].get("tools") or []: - tool = app.manager.tools.get(key) - if not tool or not tool.is_valid_for_app(app): - continue - groups_by_name[tool.group.name] = tool.group - tool_by_group_name[tool.group.name][tool.name] = tool - - for group_name in sorted(groups_by_name.keys()): - group = groups_by_name[group_name] - environments.append(group.environment) - for tool_name in sorted(tool_by_group_name[group_name].keys()): - tool = tool_by_group_name[group_name][tool_name] - environments.append(tool.environment) - app_and_tool_labels.append(tool.full_name) - - log.debug( - "Will add environments for apps and tools: {}".format( - ", ".join(app_and_tool_labels) - ) - ) - - env_values = {} - for _env_values in environments: - if not _env_values: - continue - - # Choose right platform - tool_env = parse_environments(_env_values, env_group) - - # Apply local environment variables - # - must happen between all values because they may be used during - # merge - for key, value in filtered_local_envs.items(): - if key in tool_env: - tool_env[key] = value - - # Merge dictionaries - env_values = _merge_env(tool_env, env_values) - - merged_env = _merge_env(env_values, source_env) - - loaded_env = acre.compute(merged_env, cleanup=False) - - final_env = None - # Add host specific environments - if app.host_name and implementation_envs: - host_addon = addons_manager.get_host_addon(app.host_name) - if not host_addon: - module = __import__("ayon_core.hosts", fromlist=[app.host_name]) - host_module = getattr(module, app.host_name, None) - add_implementation_envs = None - if host_addon: - add_implementation_envs = getattr( - host_addon, "add_implementation_envs", None - ) - if add_implementation_envs: - # Function may only modify passed dict without returning value - final_env = add_implementation_envs(loaded_env, app) - - if final_env is None: - final_env = loaded_env - - keys_to_remove = set(source_env.keys()) - set(final_env.keys()) - - # Update env - data["env"].update(final_env) - for key in keys_to_remove: - data["env"].pop(key, None) - - -def apply_project_environments_value( - project_name, env, project_settings=None, env_group=None -): - """Apply project specific environments on passed environments. - - The environments are applied on passed `env` argument value so it is not - required to apply changes back. - - Args: - project_name (str): Name of project for which environments should be - received. - env (dict): Environment values on which project specific environments - will be applied. 
- project_settings (dict): Project settings for passed project name. - Optional if project settings are already prepared. - - Returns: - dict: Passed env values with applied project environments. - - Raises: - KeyError: If project settings do not contain keys for project specific - environments. - """ - import acre - - if project_settings is None: - project_settings = get_project_settings(project_name) - - env_value = project_settings["core"]["project_environments"] - if env_value: - env_value = json.loads(env_value) - parsed_value = parse_environments(env_value, env_group) - env.update(acre.compute( - _merge_env(parsed_value, env), - cleanup=False - )) - return env - - -def prepare_context_environments(data, env_group=None, addons_manager=None): - """Modify launch environments with context data for launched host. - - Args: - data (EnvironmentPrepData): Dictionary where result and intermediate - result will be stored. - """ - - from ayon_core.pipeline.template_data import get_template_data - - # Context environments - log = data["log"] - - project_entity = data["project_entity"] - folder_entity = data["folder_entity"] - task_entity = data["task_entity"] - if not project_entity: - log.info( - "Skipping context environments preparation." - " Launch context does not contain required data." - ) - return - - # Load project specific environments - project_name = project_entity["name"] - project_settings = get_project_settings(project_name) - data["project_settings"] = project_settings - - app = data["app"] - context_env = { - "AYON_PROJECT_NAME": project_entity["name"], - "AYON_APP_NAME": app.full_name - } - if folder_entity: - folder_path = folder_entity["path"] - context_env["AYON_FOLDER_PATH"] = folder_path - - if task_entity: - context_env["AYON_TASK_NAME"] = task_entity["name"] - - log.debug( - "Context environments set:\n{}".format( - json.dumps(context_env, indent=4) - ) - ) - data["env"].update(context_env) - - # Apply project specific environments on current env value - # - apply them once the context environments are set - apply_project_environments_value( - project_name, data["env"], project_settings, env_group - ) - - if not app.is_host: - return - - data["env"]["AYON_HOST_NAME"] = app.host_name - - if not folder_entity or not task_entity: - # QUESTION replace with log.info and skip workfile discovery? - # - technically it should be possible to launch host without context - raise ApplicationLaunchFailed( - "Host launch require folder and task context." 
- ) - - workdir_data = get_template_data( - project_entity, - folder_entity, - task_entity, - app.host_name, - project_settings - ) - data["workdir_data"] = workdir_data - - anatomy = data["anatomy"] - - task_type = workdir_data["task"]["type"] - # Temp solution how to pass task type to `_prepare_last_workfile` - data["task_type"] = task_type - - try: - from ayon_core.pipeline.workfile import get_workdir_with_workdir_data - - workdir = get_workdir_with_workdir_data( - workdir_data, - anatomy.project_name, - anatomy, - project_settings=project_settings - ) - - except Exception as exc: - raise ApplicationLaunchFailed( - "Error in anatomy.format: {}".format(str(exc)) - ) - - if not os.path.exists(workdir): - log.debug( - "Creating workdir folder: \"{}\"".format(workdir) - ) - try: - os.makedirs(workdir) - except Exception as exc: - raise ApplicationLaunchFailed( - "Couldn't create workdir because: {}".format(str(exc)) - ) - - data["env"]["AYON_WORKDIR"] = workdir - - _prepare_last_workfile(data, workdir, addons_manager) - - -def _prepare_last_workfile(data, workdir, addons_manager): - """last workfile workflow preparation. - - Function check if should care about last workfile workflow and tries - to find the last workfile. Both information are stored to `data` and - environments. - - Last workfile is filled always (with version 1) even if any workfile - exists yet. - - Args: - data (EnvironmentPrepData): Dictionary where result and intermediate - result will be stored. - workdir (str): Path to folder where workfiles should be stored. - """ - - from ayon_core.addon import AddonsManager - from ayon_core.pipeline import HOST_WORKFILE_EXTENSIONS - from ayon_core.pipeline.workfile import ( - should_use_last_workfile_on_launch, - should_open_workfiles_tool_on_launch, - ) - - if not addons_manager: - addons_manager = AddonsManager() - - log = data["log"] - - _workdir_data = data.get("workdir_data") - if not _workdir_data: - log.info( - "Skipping last workfile preparation." - " Key `workdir_data` not filled." 
- ) - return - - app = data["app"] - workdir_data = copy.deepcopy(_workdir_data) - project_name = data["project_name"] - task_name = data["task_name"] - task_type = data["task_type"] - - start_last_workfile = data.get("start_last_workfile") - if start_last_workfile is None: - start_last_workfile = should_use_last_workfile_on_launch( - project_name, app.host_name, task_name, task_type - ) - else: - log.info("Opening of last workfile was disabled by user") - - data["start_last_workfile"] = start_last_workfile - - workfile_startup = should_open_workfiles_tool_on_launch( - project_name, app.host_name, task_name, task_type - ) - data["workfile_startup"] = workfile_startup - - # Store boolean as "0"(False) or "1"(True) - data["env"]["AVALON_OPEN_LAST_WORKFILE"] = ( - str(int(bool(start_last_workfile))) - ) - data["env"]["AYON_WORKFILE_TOOL_ON_START"] = ( - str(int(bool(workfile_startup))) - ) - - _sub_msg = "" if start_last_workfile else " not" - log.debug( - "Last workfile should{} be opened on start.".format(_sub_msg) - ) - - # Last workfile path - last_workfile_path = data.get("last_workfile_path") or "" - if not last_workfile_path: - host_addon = addons_manager.get_host_addon(app.host_name) - if host_addon: - extensions = host_addon.get_workfile_extensions() - else: - extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) - - if extensions: - from ayon_core.pipeline.workfile import ( - get_workfile_template_key, - get_last_workfile - ) - - anatomy = data["anatomy"] - project_settings = data["project_settings"] - task_type = workdir_data["task"]["type"] - template_key = get_workfile_template_key( - project_name, - task_type, - app.host_name, - project_settings=project_settings - ) - # Find last workfile - file_template = anatomy.get_template_item( - "work", template_key, "file" - ).template - - workdir_data.update({ - "version": 1, - "user": get_ayon_username(), - "ext": extensions[0] - }) - - last_workfile_path = get_last_workfile( - workdir, file_template, workdir_data, extensions, True - ) - - if os.path.exists(last_workfile_path): - log.debug(( - "Workfiles for launch context does not exists" - " yet but path will be set." - )) - log.debug( - "Setting last workfile path: {}".format(last_workfile_path) - ) - - data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path - data["last_workfile_path"] = last_workfile_path diff --git a/client/ayon_core/lib/terminal.py b/client/ayon_core/lib/terminal.py index a22f2358aa..10fcc79a27 100644 --- a/client/ayon_core/lib/terminal.py +++ b/client/ayon_core/lib/terminal.py @@ -1,15 +1,5 @@ # -*- coding: utf-8 -*- """Package helping with colorizing and formatting terminal output.""" -# :: -# //. ... .. ///. //. -# ///\\\ \\\ \\ ///\\\ /// -# /// \\ \\\ \\ /// \\ /// // -# \\\ // \\\ // \\\ // \\\// ./ -# \\\// \\\// \\\// \\\' // -# \\\ \\\ \\\ \\\// -# ''' ''' ''' ''' -# ..---===[[ PyP3 Setup ]]===---... 
-# import re import time import threading diff --git a/client/ayon_core/lib/transcoding.py b/client/ayon_core/lib/transcoding.py index 08e0bc9237..4d778c2091 100644 --- a/client/ayon_core/lib/transcoding.py +++ b/client/ayon_core/lib/transcoding.py @@ -45,7 +45,7 @@ ARRAY_TYPE_REGEX = re.compile(r"^(int|float|string)\[\d+\]$") IMAGE_EXTENSIONS = { ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", - ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", + ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dng", ".dpx", ".ecw", ".exr", ".fits", ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer", ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2", ".jng", ".jpeg", ".jpeg-ls", ".jpeg-hdr", ".2000", ".jpg", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index a284464009..675346105c 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -80,6 +80,8 @@ class AfterEffectsSubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py index ae19e63a37..ab342c1a9d 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py @@ -102,6 +102,8 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py index cf124c0bcc..bfb65708e6 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py @@ -225,6 +225,8 @@ class FusionSubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py index beb8afc3a3..d52b16b27d 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -273,6 +273,8 @@ class HarmonySubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py index 1abefa515a..cba05f6948 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py @@ -106,12 +106,14 @@ class 
MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", "AYON_WORKDIR", "AYON_APP_NAME", - "IS_TEST" + "IS_TEST", ] environment = { diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py index 5602b02707..0300b12104 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -207,6 +207,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py index ac01af901c..d70cb75bf3 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -376,6 +376,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, keys = [ "PYTHONPATH", "PATH", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", @@ -388,7 +390,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, "TOOL_ENV", "FOUNDRY_LICENSE", "OPENPYPE_SG_USER", - "AYON_BUNDLE_NAME", ] # add allowed keys from preset if any diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py index 50bd414587..4e4657d886 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py @@ -67,7 +67,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, "FTRACK_SERVER", "AYON_APP_NAME", "AYON_USERNAME", - "OPENPYPE_SG_USER", + "AYON_SG_USERNAME", "KITSU_LOGIN", "KITSU_PWD" ] @@ -133,6 +133,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, "AYON_RENDER_JOB": "0", "AYON_REMOTE_PUBLISH": "0", "AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"], + "AYON_DEFAULT_SETTINGS_VARIANT": ( + os.environ["AYON_DEFAULT_SETTINGS_VARIANT"] + ), } # add environments from self.environ_keys diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py index 84bac6d017..8def9cc63c 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py @@ -130,7 +130,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, "FTRACK_SERVER", "AYON_APP_NAME", "AYON_USERNAME", - "OPENPYPE_SG_USER", + "AYON_SG_USERNAME", "KITSU_LOGIN", "KITSU_PWD" ] @@ -210,6 +210,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, "AYON_RENDER_JOB": "0", "AYON_REMOTE_PUBLISH": "0", "AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"], + "AYON_DEFAULT_SETTINGS_VARIANT": ( + os.environ["AYON_DEFAULT_SETTINGS_VARIANT"] + ), } # add environments from self.environ_keys diff --git 
a/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 8df96b425e..ac04407f5b 100644 --- a/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -13,7 +13,7 @@ from Deadline.Scripting import ( FileUtils, DirectoryUtils, ) -__version__ = "1.0.1" +__version__ = "1.1.0" VERSION_REGEX = re.compile( r"(?P0|[1-9]\d*)" r"\.(?P0|[1-9]\d*)" @@ -463,19 +463,13 @@ def inject_ayon_environment(deadlinePlugin): export_url = os.path.join(tempfile.gettempdir(), temp_file_name) print(">>> Temporary path: {}".format(export_url)) - args = [ - "--headless", - "extractenvironments", - export_url - ] - add_kwargs = { "envgroup": "farm", } # Support backwards compatible keys for key, env_keys in ( ("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]), - ("asset", ["AYON_FOLDER_PATH", "AVALON_ASSET"]), + ("folder", ["AYON_FOLDER_PATH", "AVALON_ASSET"]), ("task", ["AYON_TASK_NAME", "AVALON_TASK"]), ("app", ["AYON_APP_NAME", "AVALON_APP_NAME"]), ): @@ -486,18 +480,37 @@ def inject_ayon_environment(deadlinePlugin): break add_kwargs[key] = value - if job.GetJobEnvironmentKeyValue("IS_TEST"): - args.append("--automatic-tests") - - if all(add_kwargs.values()): - for key, value in add_kwargs.items(): - args.extend(["--{}".format(key), value]) - else: + if not all(add_kwargs.values()): raise RuntimeError(( "Missing required env vars: AYON_PROJECT_NAME," " AYON_FOLDER_PATH, AYON_TASK_NAME, AYON_APP_NAME" )) + # Use applications addon arguments + # TODO validate if applications addon should be used + args = [ + "--headless", + "addon", + "applications", + "extractenvironments", + export_url + ] + # Backwards compatibility for older versions + legacy_args = [ + "--headless", + "extractenvironments", + export_url + ] + if job.GetJobEnvironmentKeyValue("IS_TEST"): + args.append("--automatic-tests") + + for key, value in add_kwargs.items(): + args.extend(["--{}".format(key), value]) + # Legacy arguments expect '--asset' instead of '--folder' + if key == "folder": + key = "asset" + legacy_args.extend(["--{}".format(key), value]) + environment = { "AYON_SERVER_URL": ayon_server_url, "AYON_API_KEY": ayon_api_key, @@ -516,9 +529,18 @@ def inject_ayon_environment(deadlinePlugin): ) if process_exitcode != 0: - raise RuntimeError( - "Failed to run Ayon process to extract environments." + print( + "Failed to run AYON process to extract environments. Trying" + " to use legacy arguments." ) + legacy_args_str = subprocess.list2cmdline(legacy_args) + process_exitcode = deadlinePlugin.RunProcess( + exe, legacy_args_str, os.path.dirname(exe), -1 + ) + if process_exitcode != 0: + raise RuntimeError( + "Failed to run AYON process to extract environments." 
+ ) print(">>> Loading file ...") with open(export_url) as fp: diff --git a/client/ayon_core/modules/job_queue/addon.py b/client/ayon_core/modules/job_queue/addon.py index 32d06d0040..0fa54eb2f0 100644 --- a/client/ayon_core/modules/job_queue/addon.py +++ b/client/ayon_core/modules/job_queue/addon.py @@ -168,7 +168,7 @@ class JobQueueAddon(AYONAddon): @classmethod def start_worker(cls, app_name, server_url=None): import requests - from ayon_core.lib import ApplicationManager + from ayon_applications import ApplicationManager if not server_url: server_url = cls.get_server_url_from_settings() diff --git a/client/ayon_core/modules/royalrender/lib.py b/client/ayon_core/modules/royalrender/lib.py index 5392803710..82bc96e759 100644 --- a/client/ayon_core/modules/royalrender/lib.py +++ b/client/ayon_core/modules/royalrender/lib.py @@ -308,31 +308,45 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin, export_url = os.path.join(tempfile.gettempdir(), temp_file_name) print(">>> Temporary path: {}".format(export_url)) - args = [ - "--headless", - "extractenvironments", - export_url - ] - anatomy_data = instance.context.data["anatomyData"] + addons_manager = instance.context.data["ayonAddonsManager"] + applications_addon = addons_manager.get_enabled_addon("applications") + + folder_key = "folder" + if applications_addon is None: + # Use 'asset' when applications addon command is not used + folder_key = "asset" add_kwargs = { "project": anatomy_data["project"]["name"], - "asset": instance.context.data["folderPath"], + folder_key: instance.context.data["folderPath"], "task": anatomy_data["task"]["name"], "app": instance.context.data.get("appName"), "envgroup": "farm" } - if os.getenv('IS_TEST'): - args.append("--automatic-tests") - if not all(add_kwargs.values()): raise RuntimeError(( "Missing required env vars: AYON_PROJECT_NAME, AYON_FOLDER_PATH," " AYON_TASK_NAME, AYON_APP_NAME" )) + args = ["--headless"] + # Use applications addon to extract environments + # NOTE this is for backwards compatibility, the global command + # will be removed in future and only applications addon command + # should be used. 
+ if applications_addon is not None: + args.extend(["addon", "applications"]) + + args.extend([ + "extractenvironments", + export_url + ]) + + if os.getenv('IS_TEST'): + args.append("--automatic-tests") + for key, value in add_kwargs.items(): args.extend([f"--{key}", value]) self.log.debug("Executing: {}".format(" ".join(args))) diff --git a/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py b/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py index 662913cadf..f3287b7638 100644 --- a/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py +++ b/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py @@ -65,7 +65,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin, "FTRACK_SERVER", "AYON_APP_NAME", "AYON_USERNAME", - "OPENPYPE_SG_USER", + "AYON_SG_USERNAME", ] priority = 50 diff --git a/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py b/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py index da5d430939..b402d4034a 100644 --- a/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py +++ b/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PostLaunchHook, LaunchTypes +from ayon_applications import PostLaunchHook, LaunchTypes class PostStartTimerHook(PostLaunchHook): diff --git a/client/ayon_core/pipeline/anatomy/anatomy.py b/client/ayon_core/pipeline/anatomy/anatomy.py index 0d250116bd..73dd215233 100644 --- a/client/ayon_core/pipeline/anatomy/anatomy.py +++ b/client/ayon_core/pipeline/anatomy/anatomy.py @@ -447,7 +447,7 @@ class CacheItem: class Anatomy(BaseAnatomy): - _sync_server_addon_cache = CacheItem() + _sitesync_addon_cache = CacheItem() _project_cache = collections.defaultdict(CacheItem) _default_site_id_cache = collections.defaultdict(CacheItem) _root_overrides_cache = collections.defaultdict( @@ -482,13 +482,13 @@ class Anatomy(BaseAnatomy): return copy.deepcopy(project_cache.data) @classmethod - def get_sync_server_addon(cls): - if cls._sync_server_addon_cache.is_outdated: + def get_sitesync_addon(cls): + if cls._sitesync_addon_cache.is_outdated: manager = AddonsManager() - cls._sync_server_addon_cache.update_data( - manager.get_enabled_addon("sync_server") + cls._sitesync_addon_cache.update_data( + manager.get_enabled_addon("sitesync") ) - return cls._sync_server_addon_cache.data + return cls._sitesync_addon_cache.data @classmethod def _get_studio_roots_overrides(cls, project_name): @@ -525,8 +525,8 @@ class Anatomy(BaseAnatomy): """ # First check if sync server is available and enabled - sync_server = cls.get_sync_server_addon() - if sync_server is None or not sync_server.enabled: + sitesync_addon = cls.get_sitesync_addon() + if sitesync_addon is None or not sitesync_addon.enabled: # QUESTION is ok to force 'studio' when site sync is not enabled? 
site_name = "studio" @@ -535,7 +535,7 @@ class Anatomy(BaseAnatomy): project_cache = cls._default_site_id_cache[project_name] if project_cache.is_outdated: project_cache.update_data( - sync_server.get_active_site_type(project_name) + sitesync_addon.get_active_site_type(project_name) ) site_name = project_cache.data @@ -549,7 +549,7 @@ class Anatomy(BaseAnatomy): ) else: # Ask sync server to get roots overrides - roots_overrides = sync_server.get_site_root_overrides( + roots_overrides = sitesync.get_site_root_overrides( project_name, site_name ) site_cache.update_data(roots_overrides) diff --git a/client/ayon_core/pipeline/context_tools.py b/client/ayon_core/pipeline/context_tools.py index 84a17be8f2..e9151bcd1f 100644 --- a/client/ayon_core/pipeline/context_tools.py +++ b/client/ayon_core/pipeline/context_tools.py @@ -97,8 +97,8 @@ def install_host(host): """Install `host` into the running Python session. Args: - host (module): A Python module containing the Avalon - avalon host-interface. + host (HostBase): A host interface object. + """ global _is_installed @@ -154,6 +154,13 @@ def install_host(host): def install_ayon_plugins(project_name=None, host_name=None): + """Install AYON core plugins and make sure the core is initialized. + + Args: + project_name (Optional[str]): Name of project to install plugins for. + host_name (Optional[str]): Name of host to install plugins for. + + """ # Make sure global AYON connection has set site id and version # - this is necessary if 'install_host' is not called initialize_ayon_connection() @@ -223,6 +230,12 @@ def install_ayon_plugins(project_name=None, host_name=None): def install_openpype_plugins(project_name=None, host_name=None): + """Install AYON core plugins and make sure the core is initialized. + + Deprecated: + Use `install_ayon_plugins` instead. + + """ install_ayon_plugins(project_name, host_name) @@ -281,47 +294,6 @@ def deregister_host(): _registered_host["_"] = None -def debug_host(): - """A debug host, useful to debugging features that depend on a host""" - - host = types.ModuleType("debugHost") - - def ls(): - containers = [ - { - "representation": "ee-ft-a-uuid1", - "schema": "openpype:container-1.0", - "name": "Bruce01", - "objectName": "Bruce01_node", - "namespace": "_bruce01_", - "version": 3, - }, - { - "representation": "aa-bc-s-uuid2", - "schema": "openpype:container-1.0", - "name": "Bruce02", - "objectName": "Bruce01_node", - "namespace": "_bruce02_", - "version": 2, - } - ] - - for container in containers: - yield container - - host.__dict__.update({ - "ls": ls, - "open_file": lambda fname: None, - "save_file": lambda fname: None, - "current_file": lambda: os.path.expanduser("~/temp.txt"), - "has_unsaved_changes": lambda: False, - "work_root": lambda: os.path.expanduser("~/temp"), - "file_extensions": lambda: ["txt"], - }) - - return host - - def get_current_host_name(): """Current host name. @@ -347,7 +319,8 @@ def get_global_context(): Use 'get_current_context' to make sure you'll get current host integration context info. - Example: + Example:: + { "project_name": "Commercial", "folder_path": "Bunny", @@ -411,42 +384,67 @@ def get_current_project_entity(fields=None): return ayon_api.get_project(project_name, fields=fields) -def get_current_project_folder(folder_path=None, folder_id=None, fields=None): +def get_current_folder_entity(fields=None): """Helper function to get folder entity based on current context. This function should be called only in process where host is installed. 
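Aside (not part of the patch): a small usage sketch of the reworked current-context helpers in `context_tools.py` — `get_current_folder_entity` here and `get_current_task_entity` added just below. It assumes it runs in a process where a host is installed and the current context is set:

```python
# Hypothetical usage inside an installed host session.
from ayon_core.pipeline.context_tools import (
    get_current_folder_entity,
    get_current_task_entity,
)

folder_entity = get_current_folder_entity(fields={"id", "path"})
task_entity = get_current_task_entity()
if folder_entity is None or task_entity is None:
    # Current context is incomplete (missing project/folder/task).
    print("Current context is not fully set")
```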
- Folder is found out based on passed folder path or id (not both). Folder - path is not used for filtering if folder id is passed. When both - folder path and id are missing then current folder path is used. + Folder is based on current context project name and folder path. Args: - folder_path (Union[str, None]): Folder path used for filter. - folder_id (Union[str, None]): Folder id. If entered then - is used as only filter. fields (Optional[Iterable[str]]): Limit returned data of folder entity to specific keys. Returns: - Union[dict[str, Any], None]: Fodler entity or None. + Union[dict[str, Any], None]: Folder entity or None. + """ + context = get_current_context() + project_name = context["project_name"] + folder_path = context["folder_path"] - project_name = get_current_project_name() - if folder_id: - return ayon_api.get_folder_by_id( - project_name, folder_id, fields=fields - ) - - if not folder_path: - folder_path = get_current_folder_path() - # Skip if is not set even on context - if not folder_path: - return None + # Skip if is not set even on context + if not project_name or not folder_path: + return None return ayon_api.get_folder_by_path( project_name, folder_path, fields=fields ) +def get_current_task_entity(fields=None): + """Helper function to get task entity based on current context. + + This function should be called only in process where host is installed. + + Task is based on current context project name, folder path + and task name. + + Args: + fields (Optional[Iterable[str]]): Limit returned data of task entity + to specific keys. + + Returns: + Union[dict[str, Any], None]: Task entity or None. + + """ + context = get_current_context() + project_name = context["project_name"] + folder_path = context["folder_path"] + task_name = context["task_name"] + + # Skip if is not set even on context + if not project_name or not folder_path or not task_name: + return None + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"id"} + ) + if not folder_entity: + return None + return ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name, fields=fields + ) + + def is_representation_from_latest(representation): """Return whether the representation is from latest version @@ -515,88 +513,13 @@ def get_current_context_template_data(settings=None): ) -def get_workdir_from_session(session=None, template_key=None): - """Template data for template fill from session keys. - - Args: - session (Union[Dict[str, str], None]): The Session to use. If not - provided use the currently active global Session. - template_key (str): Prepared template key from which workdir is - calculated. - - Returns: - str: Workdir path. - """ - - if session is not None: - project_name = session["AYON_PROJECT_NAME"] - host_name = session["AYON_HOST_NAME"] - else: - project_name = get_current_project_name() - host_name = get_current_host_name() - template_data = get_template_data_from_session(session) - - if not template_key: - task_type = template_data["task"]["type"] - template_key = get_workfile_template_key( - project_name, - task_type, - host_name, - ) - - anatomy = Anatomy(project_name) - template_obj = anatomy.get_template_item("work", template_key, "directory") - path = template_obj.format_strict(template_data) - if path: - path = os.path.normpath(path) - return path - - -def get_custom_workfile_template_from_session( - session=None, project_settings=None -): - """Filter and fill workfile template profiles by current context. 
- - This function cab be used only inside host where context is set. - - Args: - session (Optional[Dict[str, str]]): Session from which are taken - data. - project_settings(Optional[Dict[str, Any]]): Project settings. - - Returns: - str: Path to template or None if none of profiles match current - context. (Existence of formatted path is not validated.) - """ - - if session is not None: - project_name = session["AYON_PROJECT_NAME"] - folder_path = session["AYON_FOLDER_PATH"] - task_name = session["AYON_TASK_NAME"] - host_name = session["AYON_HOST_NAME"] - else: - context = get_current_context() - project_name = context["project_name"] - folder_path = context["folder_path"] - task_name = context["task_name"] - host_name = get_current_host_name() - - return get_custom_workfile_template_by_string_context( - project_name, - folder_path, - task_name, - host_name, - project_settings=project_settings - ) - - def get_current_context_custom_workfile_template(project_settings=None): """Filter and fill workfile template profiles by current context. - This function can be used only inside host where context is set. + This function can be used only inside host where current context is set. Args: - project_settings(Optional[Dict[str, Any]]): Project settings. + project_settings (Optional[dict[str, Any]]): Project settings Returns: str: Path to template or None if none of profiles match current diff --git a/client/ayon_core/pipeline/create/README.md b/client/ayon_core/pipeline/create/README.md index bbfd1bfa0f..09d3a22222 100644 --- a/client/ayon_core/pipeline/create/README.md +++ b/client/ayon_core/pipeline/create/README.md @@ -8,7 +8,7 @@ Discovers Creator plugins to be able create new instances and convert existing i Publish plugins are loaded because they can also define attributes definitions. These are less product type specific To be able define attributes Publish plugin must inherit from `AYONPyblishPluginMixin` and must override `get_attribute_defs` class method which must return list of attribute definitions. Values of publish plugin definitions are stored per plugin name under `publish_attributes`. Also can override `convert_attribute_values` class method which gives ability to modify values on instance before are used in CreatedInstance. Method `convert_attribute_values` can be also used without `get_attribute_defs` to modify values when changing compatibility (remove metadata from instance because are irrelevant). -Possible attribute definitions can be found in `openpype/pipeline/lib/attribute_definitions.py`. +Possible attribute definitions can be found in `ayon_core/lib/attribute_definitions.py`. Except creating and removing instances are all changes not automatically propagated to host context (scene/workfile/...) to propagate changes call `save_changes` which trigger update of all instances in context using Creators implementation. diff --git a/client/ayon_core/pipeline/legacy_io.py b/client/ayon_core/pipeline/legacy_io.py deleted file mode 100644 index d5b555845b..0000000000 --- a/client/ayon_core/pipeline/legacy_io.py +++ /dev/null @@ -1,36 +0,0 @@ -import logging -from ayon_core.pipeline import get_current_project_name - -Session = {} - -log = logging.getLogger(__name__) -log.warning( - "DEPRECATION WARNING: 'legacy_io' is deprecated and will be removed in" - " future versions of ayon-core addon." - "\nReading from Session won't give you updated information and changing" - " values won't affect global state of a process." 
-) - - -def session_data_from_environment(context_keys=False): - return {} - - -def is_installed(): - return False - - -def install(): - pass - - -def uninstall(): - pass - - -def active_project(*args, **kwargs): - return get_current_project_name() - - -def current_project(*args, **kwargs): - return get_current_project_name() diff --git a/client/ayon_core/plugins/load/open_djv.py b/client/ayon_core/plugins/load/open_djv.py deleted file mode 100644 index 30023ac1f5..0000000000 --- a/client/ayon_core/plugins/load/open_djv.py +++ /dev/null @@ -1,64 +0,0 @@ -import os -from ayon_core.lib import ApplicationManager -from ayon_core.pipeline import load - - -def existing_djv_path(): - app_manager = ApplicationManager() - djv_list = [] - - for app_name, app in app_manager.applications.items(): - if 'djv' in app_name and app.find_executable(): - djv_list.append(app_name) - - return djv_list - - -class OpenInDJV(load.LoaderPlugin): - """Open Image Sequence with system default""" - - djv_list = existing_djv_path() - product_types = {"*"} if djv_list else [] - representations = ["*"] - extensions = { - "cin", "dpx", "avi", "dv", "gif", "flv", "mkv", "mov", "mpg", "mpeg", - "mp4", "m4v", "mxf", "iff", "z", "ifl", "jpeg", "jpg", "jfif", "lut", - "1dl", "exr", "pic", "png", "ppm", "pnm", "pgm", "pbm", "rla", "rpf", - "sgi", "rgba", "rgb", "bw", "tga", "tiff", "tif", "img", "h264", - } - - label = "Open in DJV" - order = -10 - icon = "play-circle" - color = "orange" - - def load(self, context, name, namespace, data): - import clique - - path = self.filepath_from_context(context) - directory = os.path.dirname(path) - - pattern = clique.PATTERNS["frames"] - files = os.listdir(directory) - collections, remainder = clique.assemble( - files, - patterns=[pattern], - minimum_items=1 - ) - - if not remainder: - sequence = collections[0] - first_image = list(sequence)[0] - else: - first_image = path - filepath = os.path.normpath(os.path.join(directory, first_image)) - - self.log.info("Opening : {}".format(filepath)) - - last_djv_version = sorted(self.djv_list)[-1] - - app_manager = ApplicationManager() - djv = app_manager.applications.get(last_djv_version) - djv.arguments.append(filepath) - - app_manager.launch(last_djv_version) diff --git a/client/ayon_core/plugins/publish/collect_host_name.py b/client/ayon_core/plugins/publish/collect_host_name.py index e76579bbd2..ea4ec7ad41 100644 --- a/client/ayon_core/plugins/publish/collect_host_name.py +++ b/client/ayon_core/plugins/publish/collect_host_name.py @@ -1,14 +1,13 @@ """ Requires: None + Provides: - context -> host (str) + context -> hostName (str) """ import os import pyblish.api -from ayon_core.lib import ApplicationManager - class CollectHostName(pyblish.api.ContextPlugin): """Collect avalon host name to context.""" @@ -18,30 +17,8 @@ class CollectHostName(pyblish.api.ContextPlugin): def process(self, context): host_name = context.data.get("hostName") - app_name = context.data.get("appName") - app_label = context.data.get("appLabel") - # Don't override value if is already set - if host_name and app_name and app_label: + if host_name: return # Use AYON_HOST_NAME to get host name if available - if not host_name: - host_name = os.environ.get("AYON_HOST_NAME") - - # Use AYON_APP_NAME to get full app name - if not app_name: - app_name = os.environ.get("AYON_APP_NAME") - - # Fill missing values based on app full name - if (not host_name or not app_label) and app_name: - app_manager = ApplicationManager() - app = app_manager.applications.get(app_name) - if app: - if 
not host_name: - host_name = app.host_name - if not app_label: - app_label = app.full_label - - context.data["hostName"] = host_name - context.data["appName"] = app_name - context.data["appLabel"] = app_label + context.data["hostName"] = os.environ.get("AYON_HOST_NAME") diff --git a/client/ayon_core/plugins/publish/collect_settings.py b/client/ayon_core/plugins/publish/collect_settings.py index 66b89a114c..db58e7eaa9 100644 --- a/client/ayon_core/plugins/publish/collect_settings.py +++ b/client/ayon_core/plugins/publish/collect_settings.py @@ -1,5 +1,5 @@ from pyblish import api -from ayon_core.settings import get_current_project_settings +from ayon_core.settings import get_project_settings class CollectSettings(api.ContextPlugin): @@ -9,4 +9,9 @@ class CollectSettings(api.ContextPlugin): label = "Collect Settings" def process(self, context): - context.data["project_settings"] = get_current_project_settings() + project_name = context.data["projectName"] + self.log.debug( + "Collecting settings for project: {}".format(project_name) + ) + project_settings = get_project_settings(project_name) + context.data["project_settings"] = project_settings diff --git a/client/ayon_core/plugins/publish/extract_burnin.py b/client/ayon_core/plugins/publish/extract_burnin.py index ab6353a29f..93774842ca 100644 --- a/client/ayon_core/plugins/publish/extract_burnin.py +++ b/client/ayon_core/plugins/publish/extract_burnin.py @@ -27,7 +27,7 @@ class ExtractBurnin(publish.Extractor): Extractor to create video with pre-defined burnins from existing extracted video representation. - It will work only on represenations having `burnin = True` or + It will work only on representations having `burnin = True` or `tags` including `burnin` """ @@ -125,7 +125,7 @@ class ExtractBurnin(publish.Extractor): burnin_defs = copy.deepcopy(src_burnin_defs) - # Filter output definition by `burnin` represetation key + # Filter output definition by `burnin` representation key repre_linked_burnins = [ burnin_def for burnin_def in burnin_defs @@ -194,6 +194,16 @@ class ExtractBurnin(publish.Extractor): ).format(host_name, product_type, task_name, profile)) return + burnins_per_repres = self._get_burnins_per_representations( + instance, burnin_defs + ) + if not burnins_per_repres: + self.log.debug( + "Skipped instance. No representations found matching a burnin" + "definition in: %s", burnin_defs + ) + return + burnin_options = self._get_burnin_options() # Prepare basic data for processing @@ -204,9 +214,6 @@ class ExtractBurnin(publish.Extractor): # Args that will execute the script executable_args = ["run", scriptpath] - burnins_per_repres = self._get_burnins_per_representations( - instance, burnin_defs - ) for repre, repre_burnin_defs in burnins_per_repres: # Create copy of `_burnin_data` and `_temp_data` for repre. burnin_data = copy.deepcopy(_burnin_data) @@ -371,6 +378,7 @@ class ExtractBurnin(publish.Extractor): # Prepare subprocess arguments args = list(executable_args) args.append(temporary_json_filepath) + args.append("--headless") self.log.debug("Executing: {}".format(" ".join(args))) # Run burnin script @@ -540,7 +548,7 @@ class ExtractBurnin(publish.Extractor): return burnin_data, temp_data def repres_is_valid(self, repre): - """Validation if representaion should be processed. + """Validation if representation should be processed. Args: repre (dict): Representation which should be checked. @@ -572,7 +580,7 @@ class ExtractBurnin(publish.Extractor): tags (list): Tags of processed representation. 
Returns: - list: Containg all burnin definitions matching entered tags. + list: Contain all burnin definitions matching entered tags. """ filtered_burnins = [] @@ -597,7 +605,7 @@ class ExtractBurnin(publish.Extractor): Store data to `temp_data` for keys "full_input_path" which is full path to source files optionally with sequence formatting, - "full_output_path" full path to otput with optionally with sequence + "full_output_path" full path to output with optionally with sequence formatting, "full_input_paths" list of all source files which will be deleted when burnin script ends, "repre_files" list of output filenames. @@ -747,7 +755,7 @@ class ExtractBurnin(publish.Extractor): profile (dict): Profile from presets matching current context. Returns: - list: Containg all valid output definitions. + list: Contain all valid output definitions. """ filtered_burnin_defs = [] @@ -768,7 +776,7 @@ class ExtractBurnin(publish.Extractor): ): self.log.debug(( "Skipped burnin definition \"{}\". Family" - " fiters ({}) does not match current instance families: {}" + " filters ({}) does not match current instance families: {}" ).format( filename_suffix, str(families_filters), str(families) )) diff --git a/client/ayon_core/plugins/publish/extract_review.py b/client/ayon_core/plugins/publish/extract_review.py index 905158c851..d51a7374d3 100644 --- a/client/ayon_core/plugins/publish/extract_review.py +++ b/client/ayon_core/plugins/publish/extract_review.py @@ -619,7 +619,7 @@ class ExtractReview(pyblish.api.InstancePlugin): # Prepare input and output filepaths self.input_output_paths(new_repre, output_def, temp_data) - # Set output frames len to 1 when ouput is single image + # Set output frames len to 1 when output is single image if ( temp_data["output_ext_is_image"] and not temp_data["output_is_sequence"] @@ -955,7 +955,7 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("New representation ext: `{}`".format(output_ext)) - # Output is image file sequence witht frames + # Output is image file sequence with frames output_ext_is_image = bool(output_ext in self.image_exts) output_is_sequence = bool( output_ext_is_image @@ -967,7 +967,7 @@ class ExtractReview(pyblish.api.InstancePlugin): frame_end = temp_data["output_frame_end"] filename_base = "{}_{}".format(filename, filename_suffix) - # Temporary tempalte for frame filling. Example output: + # Temporary template for frame filling. Example output: # "basename.%04d.exr" when `frame_end` == 1001 repr_file = "{}.%{:0>2}d.{}".format( filename_base, len(str(frame_end)), output_ext @@ -997,7 +997,7 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("Creating dir: {}".format(dst_staging_dir)) os.makedirs(dst_staging_dir) - # Store stagingDir to representaion + # Store stagingDir to representation new_repre["stagingDir"] = dst_staging_dir # Store paths to temp data @@ -1225,19 +1225,13 @@ class ExtractReview(pyblish.api.InstancePlugin): filters = [] # if reformat input video file is already reforamted from upstream - reformat_in_baking = bool("reformated" in new_repre["tags"]) + reformat_in_baking = ( + "reformatted" in new_repre["tags"] + # Backwards compatibility + or "reformated" in new_repre["tags"] + ) self.log.debug("reformat_in_baking: `{}`".format(reformat_in_baking)) - # Get instance data - pixel_aspect = temp_data["pixel_aspect"] - - if reformat_in_baking: - self.log.debug(( - "Using resolution from input. 
It is already " - "reformated from upstream process" - )) - pixel_aspect = 1 - # NOTE Skipped using instance's resolution full_input_path_single_file = temp_data["full_input_path_single_file"] try: @@ -1268,7 +1262,7 @@ class ExtractReview(pyblish.api.InstancePlugin): if reformat_in_baking: self.log.debug(( "Using resolution from input. It is already " - "reformated from upstream process" + "reformatted from upstream process" )) pixel_aspect = 1 output_width = input_width @@ -1374,7 +1368,7 @@ class ExtractReview(pyblish.api.InstancePlugin): # Make sure output width and height is not an odd number # When this can happen: # - if output definition has set width and height with odd number - # - `instance.data` contain width and height with odd numbeer + # - `instance.data` contain width and height with odd number if output_width % 2 != 0: self.log.warning(( "Converting output width from odd to even number. {} -> {}" @@ -1555,7 +1549,7 @@ class ExtractReview(pyblish.api.InstancePlugin): custom_tags (list): Custom Tags of processed representation. Returns: - list: Containg all output definitions matching entered tags. + list: Containing all output definitions matching entered tags. """ filtered_outputs = [] @@ -1820,8 +1814,8 @@ class OverscanCrop: """ # crop=width:height:x:y - explicit start x, y position # crop=width:height - x, y are related to center by width/height - # pad=width:heigth:x:y - explicit start x, y position - # pad=width:heigth - x, y are set to 0 by default + # pad=width:height:x:y - explicit start x, y position + # pad=width:height - x, y are set to 0 by default width = self.width() height = self.height() @@ -1869,7 +1863,7 @@ class OverscanCrop: # Replace "px" (and spaces before) with single space string_value = re.sub(r"([ ]+)?px", " ", string_value) string_value = re.sub(r"([ ]+)%", "%", string_value) - # Make sure +/- sign at the beggining of string is next to number + # Make sure +/- sign at the beginning of string is next to number string_value = re.sub(r"^([\+\-])[ ]+", "\g<1>", string_value) # Make sure +/- sign in the middle has zero spaces before number under # which belongs diff --git a/client/ayon_core/plugins/publish/integrate_hero_version.py b/client/ayon_core/plugins/publish/integrate_hero_version.py index 7969457697..8c36719b77 100644 --- a/client/ayon_core/plugins/publish/integrate_hero_version.py +++ b/client/ayon_core/plugins/publish/integrate_hero_version.py @@ -90,6 +90,9 @@ class IntegrateHeroVersion( # *but all other plugins must be sucessfully completed def process(self, instance): + if not self.is_active(instance.data): + return + self.log.debug( "--- Integration of Hero version for product `{}` begins.".format( instance.data["productName"] diff --git a/client/ayon_core/settings/lib.py b/client/ayon_core/settings/lib.py index d72e4f357a..3929818d31 100644 --- a/client/ayon_core/settings/lib.py +++ b/client/ayon_core/settings/lib.py @@ -201,7 +201,7 @@ def get_current_project_settings(): Project name should be stored in environment variable `AYON_PROJECT_NAME`. This function should be used only in host context where environment variable must be set and should not happen that any part of process will - change the value of the enviornment variable. + change the value of the environment variable. """ project_name = os.environ.get("AYON_PROJECT_NAME") if not project_name: @@ -209,6 +209,3 @@ def get_current_project_settings(): "Missing context project in environemt variable `AYON_PROJECT_NAME`." 
) return get_project_settings(project_name) - - - diff --git a/client/ayon_core/tools/launcher/models/actions.py b/client/ayon_core/tools/launcher/models/actions.py index 6da34151b6..32df600c87 100644 --- a/client/ayon_core/tools/launcher/models/actions.py +++ b/client/ayon_core/tools/launcher/models/actions.py @@ -2,6 +2,7 @@ import os from ayon_core import resources from ayon_core.lib import Logger, AYONSettingsRegistry +from ayon_core.addon import AddonsManager from ayon_core.pipeline.actions import ( discover_launcher_actions, LauncherAction, @@ -109,8 +110,8 @@ class ApplicationAction(LauncherAction): def process(self, selection, **kwargs): """Process the full Application action""" - from ayon_core.lib import ( - ApplictionExecutableNotFound, + from ayon_applications import ( + ApplicationExecutableNotFound, ApplicationLaunchFailed, ) @@ -122,7 +123,7 @@ class ApplicationAction(LauncherAction): **self.data ) - except ApplictionExecutableNotFound as exc: + except ApplicationExecutableNotFound as exc: details = exc.details msg = exc.msg log_msg = str(msg) @@ -270,6 +271,8 @@ class ActionsModel: self._launcher_tool_reg = AYONSettingsRegistry("launcher_tool") + self._addons_manager = None + @property def log(self): if self._log is None: @@ -410,6 +413,11 @@ class ActionsModel: } ) + def _get_addons_manager(self): + if self._addons_manager is None: + self._addons_manager = AddonsManager() + return self._addons_manager + def _get_no_last_workfile_reg_data(self): try: no_workfile_reg_data = self._launcher_tool_reg.get_item( @@ -489,19 +497,16 @@ class ActionsModel: return action_items def _get_applications_action_classes(self): - from ayon_core.lib.applications import ( - CUSTOM_LAUNCH_APP_GROUPS, - ApplicationManager, - ) - actions = [] - manager = ApplicationManager() + addons_manager = self._get_addons_manager() + applications_addon = addons_manager.get_enabled_addon("applications") + if applications_addon is None: + return actions + + manager = applications_addon.get_applications_manager() for full_name, application in manager.applications.items(): - if ( - application.group.name in CUSTOM_LAUNCH_APP_GROUPS - or not application.enabled - ): + if not application.enabled: continue action = type( diff --git a/client/ayon_core/tools/loader/abstract.py b/client/ayon_core/tools/loader/abstract.py index 33add0213b..7a7d335092 100644 --- a/client/ayon_core/tools/loader/abstract.py +++ b/client/ayon_core/tools/loader/abstract.py @@ -871,7 +871,7 @@ class FrontendLoaderController(_BaseLoaderController): # Site sync functions @abstractmethod - def is_site_sync_enabled(self, project_name=None): + def is_sitesync_enabled(self, project_name=None): """Is site sync enabled. Site sync addon can be enabled but can be disabled per project. 
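Aside (not part of the patch): the rename from the old `sync_server` addon to `sitesync` recurs through the following hunks; a minimal sketch of the lookup pattern the tools now use, with names as they appear in this diff:

```python
from ayon_core.addon import AddonsManager

manager = AddonsManager()
# 'get_enabled_addon' returns None when the addon is missing or disabled;
# 'get' returns the addon object even when it is disabled.
sitesync_addon = manager.get_enabled_addon("sitesync")
if sitesync_addon is None:
    print("SiteSync addon is not available or not enabled")
```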
diff --git a/client/ayon_core/tools/loader/control.py b/client/ayon_core/tools/loader/control.py index d8562f50ca..0c9bb369c7 100644 --- a/client/ayon_core/tools/loader/control.py +++ b/client/ayon_core/tools/loader/control.py @@ -113,7 +113,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): self._products_model = ProductsModel(self) self._loader_actions_model = LoaderActionsModel(self) self._thumbnails_model = ThumbnailsModel() - self._site_sync_model = SiteSyncModel(self) + self._sitesync_model = SiteSyncModel(self) @property def log(self): @@ -149,7 +149,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): self._loader_actions_model.reset() self._projects_model.reset() self._thumbnails_model.reset() - self._site_sync_model.reset() + self._sitesync_model.reset() self._projects_model.refresh() @@ -240,7 +240,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): project_name, representation_ids) ) - action_items.extend(self._site_sync_model.get_site_sync_action_items( + action_items.extend(self._sitesync_model.get_sitesync_action_items( project_name, representation_ids) ) @@ -254,8 +254,8 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): version_ids, representation_ids ): - if self._site_sync_model.is_site_sync_action(identifier): - self._site_sync_model.trigger_action_item( + if self._sitesync_model.is_sitesync_action(identifier): + self._sitesync_model.trigger_action_item( identifier, project_name, representation_ids @@ -368,24 +368,24 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): self._loaded_products_cache.update_data(product_ids) return self._loaded_products_cache.get_data() - def is_site_sync_enabled(self, project_name=None): - return self._site_sync_model.is_site_sync_enabled(project_name) + def is_sitesync_enabled(self, project_name=None): + return self._sitesync_model.is_sitesync_enabled(project_name) def get_active_site_icon_def(self, project_name): - return self._site_sync_model.get_active_site_icon_def(project_name) + return self._sitesync_model.get_active_site_icon_def(project_name) def get_remote_site_icon_def(self, project_name): - return self._site_sync_model.get_remote_site_icon_def(project_name) + return self._sitesync_model.get_remote_site_icon_def(project_name) def get_version_sync_availability(self, project_name, version_ids): - return self._site_sync_model.get_version_sync_availability( + return self._sitesync_model.get_version_sync_availability( project_name, version_ids ) def get_representations_sync_status( self, project_name, representation_ids ): - return self._site_sync_model.get_representations_sync_status( + return self._sitesync_model.get_representations_sync_status( project_name, representation_ids ) diff --git a/client/ayon_core/tools/loader/models/__init__.py b/client/ayon_core/tools/loader/models/__init__.py index 8e640659a0..10fd3da4d3 100644 --- a/client/ayon_core/tools/loader/models/__init__.py +++ b/client/ayon_core/tools/loader/models/__init__.py @@ -1,7 +1,7 @@ from .selection import SelectionModel from .products import ProductsModel from .actions import LoaderActionsModel -from .site_sync import SiteSyncModel +from .sitesync import SiteSyncModel __all__ = ( diff --git a/client/ayon_core/tools/loader/models/site_sync.py b/client/ayon_core/tools/loader/models/sitesync.py similarity index 90% rename from client/ayon_core/tools/loader/models/site_sync.py rename to client/ayon_core/tools/loader/models/sitesync.py 
index a589cf7fbe..987510905b 100644 --- a/client/ayon_core/tools/loader/models/site_sync.py +++ b/client/ayon_core/tools/loader/models/sitesync.py @@ -36,7 +36,7 @@ class SiteSyncModel: self._controller = controller self._site_icons = None - self._site_sync_enabled_cache = NestedCacheItem( + self._sitesync_enabled_cache = NestedCacheItem( levels=1, lifetime=self.lifetime ) self._active_site_cache = NestedCacheItem( @@ -57,17 +57,17 @@ class SiteSyncModel: ) manager = AddonsManager() - self._site_sync_addon = manager.get("sync_server") + self._sitesync_addon = manager.get("sitesync") def reset(self): self._site_icons = None - self._site_sync_enabled_cache.reset() + self._sitesync_enabled_cache.reset() self._active_site_cache.reset() self._remote_site_cache.reset() self._version_availability_cache.reset() self._repre_status_cache.reset() - def is_site_sync_enabled(self, project_name=None): + def is_sitesync_enabled(self, project_name=None): """Site sync is enabled for a project. Returns false if site sync addon is not available or enabled @@ -82,13 +82,13 @@ class SiteSyncModel: bool: Site sync is enabled. """ - if not self._is_site_sync_addon_enabled(): + if not self._is_sitesync_addon_enabled(): return False - cache = self._site_sync_enabled_cache[project_name] + cache = self._sitesync_enabled_cache[project_name] if not cache.is_valid: enabled = True if project_name: - enabled = self._site_sync_addon.is_project_enabled( + enabled = self._sitesync_addon.is_project_enabled( project_name, single=True ) cache.update_data(enabled) @@ -107,8 +107,8 @@ class SiteSyncModel: cache = self._active_site_cache[project_name] if not cache.is_valid: site_name = None - if project_name and self._is_site_sync_addon_enabled(): - site_name = self._site_sync_addon.get_active_site(project_name) + if project_name and self._is_sitesync_addon_enabled(): + site_name = self._sitesync_addon.get_active_site(project_name) cache.update_data(site_name) return cache.get_data() @@ -125,8 +125,8 @@ class SiteSyncModel: cache = self._remote_site_cache[project_name] if not cache.is_valid: site_name = None - if project_name and self._is_site_sync_addon_enabled(): - site_name = self._site_sync_addon.get_remote_site(project_name) + if project_name and self._is_sitesync_addon_enabled(): + site_name = self._sitesync_addon.get_remote_site(project_name) cache.update_data(site_name) return cache.get_data() @@ -140,7 +140,7 @@ class SiteSyncModel: Union[dict[str, Any], None]: Site icon definition. """ - if not project_name or not self.is_site_sync_enabled(project_name): + if not project_name or not self.is_sitesync_enabled(project_name): return None active_site = self.get_active_site(project_name) return self._get_site_icon_def(project_name, active_site) @@ -155,14 +155,14 @@ class SiteSyncModel: Union[dict[str, Any], None]: Site icon definition. 
""" - if not project_name or not self.is_site_sync_enabled(project_name): + if not project_name or not self.is_sitesync_enabled(project_name): return None remote_site = self.get_remote_site(project_name) return self._get_site_icon_def(project_name, remote_site) def _get_site_icon_def(self, project_name, site_name): # use different icon for studio even if provider is 'local_drive' - if site_name == self._site_sync_addon.DEFAULT_SITE: + if site_name == self._sitesync_addon.DEFAULT_SITE: provider = "studio" else: provider = self._get_provider_for_site(project_name, site_name) @@ -179,7 +179,7 @@ class SiteSyncModel: dict[str, tuple[int, int]] """ - if not self.is_site_sync_enabled(project_name): + if not self.is_sitesync_enabled(project_name): return { version_id: _default_version_availability() for version_id in version_ids @@ -217,7 +217,7 @@ class SiteSyncModel: dict[str, tuple[float, float]] """ - if not self.is_site_sync_enabled(project_name): + if not self.is_sitesync_enabled(project_name): return { repre_id: _default_repre_status() for repre_id in representation_ids @@ -242,7 +242,7 @@ class SiteSyncModel: output[repre_id] = repre_cache.get_data() return output - def get_site_sync_action_items(self, project_name, representation_ids): + def get_sitesync_action_items(self, project_name, representation_ids): """ Args: @@ -253,7 +253,7 @@ class SiteSyncModel: list[ActionItem]: Actions that can be shown in loader. """ - if not self.is_site_sync_enabled(project_name): + if not self.is_sitesync_enabled(project_name): return [] repres_status = self.get_representations_sync_status( @@ -289,7 +289,7 @@ class SiteSyncModel: return action_items - def is_site_sync_action(self, identifier): + def is_sitesync_action(self, identifier): """Should be `identifier` handled by SiteSync. Args: @@ -353,22 +353,22 @@ class SiteSyncModel: ) elif identifier == REMOVE_IDENTIFIER: - self._site_sync_addon.remove_site( + self._sitesync_addon.remove_site( project_name, repre_id, active_site, remove_local_files=True ) - def _is_site_sync_addon_enabled(self): + def _is_sitesync_addon_enabled(self): """ Returns: bool: Site sync addon is enabled. """ - if self._site_sync_addon is None: + if self._sitesync_addon is None: return False - return self._site_sync_addon.enabled + return self._sitesync_addon.enabled def _get_provider_for_site(self, project_name, site_name): """Provider for a site. @@ -381,9 +381,9 @@ class SiteSyncModel: Union[str, None]: Provider name. 
""" - if not self._is_site_sync_addon_enabled(): + if not self._is_sitesync_addon_enabled(): return None - return self._site_sync_addon.get_provider_for_site( + return self._sitesync_addon.get_provider_for_site( project_name, site_name ) @@ -398,7 +398,7 @@ class SiteSyncModel: return None if self._site_icons is None: - self._site_icons = self._site_sync_addon.get_site_icons() + self._site_icons = self._sitesync_addon.get_site_icons() return self._site_icons.get(provider) def _refresh_version_availability(self, project_name, version_ids): @@ -406,7 +406,7 @@ class SiteSyncModel: return project_cache = self._version_availability_cache[project_name] - avail_by_id = self._site_sync_addon.get_version_availability( + avail_by_id = self._sitesync_addon.get_version_availability( project_name, version_ids, self.get_active_site(project_name), @@ -425,7 +425,7 @@ class SiteSyncModel: return project_cache = self._repre_status_cache[project_name] status_by_repre_id = ( - self._site_sync_addon.get_representations_sync_state( + self._sitesync_addon.get_representations_sync_state( project_name, representation_ids, self.get_active_site(project_name), @@ -496,7 +496,7 @@ class SiteSyncModel: ) def _add_site(self, project_name, repre_entity, site_name, product_type): - self._site_sync_addon.add_site( + self._sitesync_addon.add_site( project_name, repre_entity["id"], site_name, force=True ) @@ -513,7 +513,7 @@ class SiteSyncModel: try: print("Adding {} to linked representation: {}".format( site_name, link_repre_id)) - self._site_sync_addon.add_site( + self._sitesync_addon.add_site( project_name, link_repre_id, site_name, diff --git a/client/ayon_core/tools/loader/ui/products_model.py b/client/ayon_core/tools/loader/ui/products_model.py index c51172849a..41342ba0df 100644 --- a/client/ayon_core/tools/loader/ui/products_model.py +++ b/client/ayon_core/tools/loader/ui/products_model.py @@ -73,7 +73,7 @@ class ProductsModel(QtGui.QStandardItemModel): published_time_col = column_labels.index("Time") folders_label_col = column_labels.index("Folder") in_scene_col = column_labels.index("In scene") - site_sync_avail_col = column_labels.index("Availability") + sitesync_avail_col = column_labels.index("Availability") def __init__(self, controller): super(ProductsModel, self).__init__() diff --git a/client/ayon_core/tools/loader/ui/products_widget.py b/client/ayon_core/tools/loader/ui/products_widget.py index 3025ec18bd..d9f027153e 100644 --- a/client/ayon_core/tools/loader/ui/products_widget.py +++ b/client/ayon_core/tools/loader/ui/products_widget.py @@ -139,9 +139,9 @@ class ProductsWidget(QtWidgets.QWidget): products_view.setItemDelegateForColumn( products_model.in_scene_col, in_scene_delegate) - site_sync_delegate = SiteSyncDelegate() + sitesync_delegate = SiteSyncDelegate() products_view.setItemDelegateForColumn( - products_model.site_sync_avail_col, site_sync_delegate) + products_model.sitesync_avail_col, sitesync_delegate) main_layout = QtWidgets.QHBoxLayout(self) main_layout.setContentsMargins(0, 0, 0, 0) @@ -176,7 +176,7 @@ class ProductsWidget(QtWidgets.QWidget): self._version_delegate = version_delegate self._time_delegate = time_delegate self._in_scene_delegate = in_scene_delegate - self._site_sync_delegate = site_sync_delegate + self._sitesync_delegate = sitesync_delegate self._selected_project_name = None self._selected_folder_ids = set() @@ -192,8 +192,8 @@ class ProductsWidget(QtWidgets.QWidget): products_model.in_scene_col, not controller.is_loaded_products_supported() ) - 
self._set_site_sync_visibility( - self._controller.is_site_sync_enabled() + self._set_sitesync_visibility( + self._controller.is_sitesync_enabled() ) def set_name_filter(self, name): @@ -229,10 +229,10 @@ class ProductsWidget(QtWidgets.QWidget): def refresh(self): self._refresh_model() - def _set_site_sync_visibility(self, site_sync_enabled): + def _set_sitesync_visibility(self, sitesync_enabled): self._products_view.setColumnHidden( - self._products_model.site_sync_avail_col, - not site_sync_enabled + self._products_model.sitesync_avail_col, + not sitesync_enabled ) def _fill_version_editor(self): @@ -395,10 +395,10 @@ class ProductsWidget(QtWidgets.QWidget): def _on_folders_selection_change(self, event): project_name = event["project_name"] - site_sync_enabled = self._controller.is_site_sync_enabled( + sitesync_enabled = self._controller.is_sitesync_enabled( project_name ) - self._set_site_sync_visibility(site_sync_enabled) + self._set_sitesync_visibility(sitesync_enabled) self._selected_project_name = project_name self._selected_folder_ids = event["folder_ids"] self._refresh_model() diff --git a/client/ayon_core/tools/loader/ui/repres_widget.py b/client/ayon_core/tools/loader/ui/repres_widget.py index 3b6b8f94bf..d19ad306a3 100644 --- a/client/ayon_core/tools/loader/ui/repres_widget.py +++ b/client/ayon_core/tools/loader/ui/repres_widget.py @@ -307,8 +307,8 @@ class RepresentationsWidget(QtWidgets.QWidget): self._repre_model = repre_model self._repre_proxy_model = repre_proxy_model - self._set_site_sync_visibility( - self._controller.is_site_sync_enabled() + self._set_sitesync_visibility( + self._controller.is_sitesync_enabled() ) self._set_multiple_folders_selected(False) @@ -320,19 +320,19 @@ class RepresentationsWidget(QtWidgets.QWidget): def _on_project_change(self, event): self._selected_project_name = event["project_name"] - site_sync_enabled = self._controller.is_site_sync_enabled( + sitesync_enabled = self._controller.is_sitesync_enabled( self._selected_project_name ) - self._set_site_sync_visibility(site_sync_enabled) + self._set_sitesync_visibility(sitesync_enabled) - def _set_site_sync_visibility(self, site_sync_enabled): + def _set_sitesync_visibility(self, sitesync_enabled): self._repre_view.setColumnHidden( self._repre_model.active_site_column, - not site_sync_enabled + not sitesync_enabled ) self._repre_view.setColumnHidden( self._repre_model.remote_site_column, - not site_sync_enabled + not sitesync_enabled ) def _set_multiple_folders_selected(self, selected_multiple_folders): diff --git a/client/ayon_core/tools/sceneinventory/control.py b/client/ayon_core/tools/sceneinventory/control.py index 77f4d60b22..592113455c 100644 --- a/client/ayon_core/tools/sceneinventory/control.py +++ b/client/ayon_core/tools/sceneinventory/control.py @@ -28,7 +28,7 @@ class SceneInventoryController: self._current_folder_id = None self._current_folder_set = False - self._site_sync_model = SiteSyncModel(self) + self._sitesync_model = SiteSyncModel(self) # Switch dialog requirements self._hierarchy_model = HierarchyModel(self) self._event_system = self._create_event_system() @@ -47,7 +47,7 @@ class SceneInventoryController: self._current_folder_id = None self._current_folder_set = False - self._site_sync_model.reset() + self._sitesync_model.reset() self._hierarchy_model.reset() def get_current_context(self): @@ -89,22 +89,22 @@ class SceneInventoryController: return [] # Site Sync methods - def is_sync_server_enabled(self): - return self._site_sync_model.is_sync_server_enabled() + def 
is_sitesync_enabled(self): + return self._sitesync_model.is_sitesync_enabled() def get_sites_information(self): - return self._site_sync_model.get_sites_information() + return self._sitesync_model.get_sites_information() def get_site_provider_icons(self): - return self._site_sync_model.get_site_provider_icons() + return self._sitesync_model.get_site_provider_icons() def get_representations_site_progress(self, representation_ids): - return self._site_sync_model.get_representations_site_progress( + return self._sitesync_model.get_representations_site_progress( representation_ids ) def resync_representations(self, representation_ids, site_type): - return self._site_sync_model.resync_representations( + return self._sitesync_model.resync_representations( representation_ids, site_type ) diff --git a/client/ayon_core/tools/sceneinventory/models/__init__.py b/client/ayon_core/tools/sceneinventory/models/__init__.py index c861d3c1a0..f840a45aa8 100644 --- a/client/ayon_core/tools/sceneinventory/models/__init__.py +++ b/client/ayon_core/tools/sceneinventory/models/__init__.py @@ -1,4 +1,4 @@ -from .site_sync import SiteSyncModel +from .sitesync import SiteSyncModel __all__ = ( diff --git a/client/ayon_core/tools/sceneinventory/models/site_sync.py b/client/ayon_core/tools/sceneinventory/models/sitesync.py similarity index 75% rename from client/ayon_core/tools/sceneinventory/models/site_sync.py rename to client/ayon_core/tools/sceneinventory/models/sitesync.py index 7f09f2b25b..1a1f08bf02 100644 --- a/client/ayon_core/tools/sceneinventory/models/site_sync.py +++ b/client/ayon_core/tools/sceneinventory/models/sitesync.py @@ -9,30 +9,30 @@ class SiteSyncModel: def __init__(self, controller): self._controller = controller - self._sync_server_module = NOT_SET - self._sync_server_enabled = None + self._sitesync_addon = NOT_SET + self._sitesync_enabled = None self._active_site = NOT_SET self._remote_site = NOT_SET self._active_site_provider = NOT_SET self._remote_site_provider = NOT_SET def reset(self): - self._sync_server_module = NOT_SET - self._sync_server_enabled = None + self._sitesync_addon = NOT_SET + self._sitesync_enabled = None self._active_site = NOT_SET self._remote_site = NOT_SET self._active_site_provider = NOT_SET self._remote_site_provider = NOT_SET - def is_sync_server_enabled(self): + def is_sitesync_enabled(self): """Site sync is enabled. Returns: bool: Is enabled or not. """ - self._cache_sync_server_module() - return self._sync_server_enabled + self._cache_sitesync_addon() + return self._sitesync_enabled def get_site_provider_icons(self): """Icon paths per provider. @@ -41,10 +41,10 @@ class SiteSyncModel: dict[str, str]: Path by provider name. 
""" - if not self.is_sync_server_enabled(): + if not self.is_sitesync_enabled(): return {} - site_sync_addon = self._get_sync_server_module() - return site_sync_addon.get_site_icons() + sitesync_addon = self._get_sitesync_addon() + return sitesync_addon.get_site_icons() def get_sites_information(self): return { @@ -65,11 +65,11 @@ class SiteSyncModel: } for repre_id in representation_ids } - if not self.is_sync_server_enabled(): + if not self.is_sitesync_enabled(): return output project_name = self._controller.get_current_project_name() - site_sync = self._get_sync_server_module() + sitesync_addon = self._get_sitesync_addon() repre_entities = ayon_api.get_representations( project_name, representation_ids ) @@ -78,7 +78,7 @@ class SiteSyncModel: for repre_entity in repre_entities: repre_output = output[repre_entity["id"]] - result = site_sync.get_progress_for_repre( + result = sitesync_addon.get_progress_for_repre( repre_entity, active_site, remote_site ) repre_output["active_site"] = result[active_site] @@ -95,7 +95,7 @@ class SiteSyncModel: """ project_name = self._controller.get_current_project_name() - site_sync = self._get_sync_server_module() + sitesync_addon = self._get_sitesync_addon() active_site = self._get_active_site() remote_site = self._get_remote_site() progress = self.get_representations_site_progress( @@ -115,22 +115,22 @@ class SiteSyncModel: site = remote_site if check_progress == 1: - site_sync.add_site( + sitesync_addon.add_site( project_name, repre_id, site, force=True ) - def _get_sync_server_module(self): - self._cache_sync_server_module() - return self._sync_server_module + def _get_sitesync_addon(self): + self._cache_sitesync_addon() + return self._sitesync_addon - def _cache_sync_server_module(self): - if self._sync_server_module is not NOT_SET: - return self._sync_server_module + def _cache_sitesync_addon(self): + if self._sitesync_addon is not NOT_SET: + return self._sitesync_addon manager = AddonsManager() - site_sync = manager.get("sync_server") - sync_enabled = site_sync is not None and site_sync.enabled - self._sync_server_module = site_sync - self._sync_server_enabled = sync_enabled + sitesync_addon = manager.get("sitesync") + sync_enabled = sitesync_addon is not None and sitesync_addon.enabled + self._sitesync_addon = sitesync_addon + self._sitesync_enabled = sync_enabled def _get_active_site(self): if self._active_site is NOT_SET: @@ -157,19 +157,19 @@ class SiteSyncModel: remote_site = None active_site_provider = None remote_site_provider = None - if self.is_sync_server_enabled(): - site_sync = self._get_sync_server_module() + if self.is_sitesync_enabled(): + sitesync_addon = self._get_sitesync_addon() project_name = self._controller.get_current_project_name() - active_site = site_sync.get_active_site(project_name) - remote_site = site_sync.get_remote_site(project_name) + active_site = sitesync_addon.get_active_site(project_name) + remote_site = sitesync_addon.get_remote_site(project_name) active_site_provider = "studio" remote_site_provider = "studio" if active_site != "studio": - active_site_provider = site_sync.get_provider_for_site( + active_site_provider = sitesync_addon.get_provider_for_site( project_name, active_site ) if remote_site != "studio": - remote_site_provider = site_sync.get_provider_for_site( + remote_site_provider = sitesync_addon.get_provider_for_site( project_name, remote_site ) diff --git a/client/ayon_core/tools/sceneinventory/view.py b/client/ayon_core/tools/sceneinventory/view.py index d576bdc139..5cbd4daf70 100644 --- 
a/client/ayon_core/tools/sceneinventory/view.py +++ b/client/ayon_core/tools/sceneinventory/view.py @@ -311,9 +311,9 @@ class SceneInventoryView(QtWidgets.QTreeView): menu.addAction(remove_action) - self._handle_sync_server(menu, repre_ids) + self._handle_sitesync(menu, repre_ids) - def _handle_sync_server(self, menu, repre_ids): + def _handle_sitesync(self, menu, repre_ids): """Adds actions for download/upload when SyncServer is enabled Args: @@ -324,7 +324,7 @@ class SceneInventoryView(QtWidgets.QTreeView): (OptionMenu) """ - if not self._controller.is_sync_server_enabled(): + if not self._controller.is_sitesync_enabled(): return menu.addSeparator() diff --git a/client/ayon_core/tools/sceneinventory/window.py b/client/ayon_core/tools/sceneinventory/window.py index 9584524edd..555db3a17c 100644 --- a/client/ayon_core/tools/sceneinventory/window.py +++ b/client/ayon_core/tools/sceneinventory/window.py @@ -70,7 +70,7 @@ class SceneInventoryWindow(QtWidgets.QDialog): view = SceneInventoryView(controller, self) view.setModel(proxy) - sync_enabled = controller.is_sync_server_enabled() + sync_enabled = controller.is_sitesync_enabled() view.setColumnHidden(model.active_site_col, not sync_enabled) view.setColumnHidden(model.remote_site_col, not sync_enabled) diff --git a/client/ayon_core/tools/workfiles/control.py b/client/ayon_core/tools/workfiles/control.py index 7fa7af1662..3048e6be94 100644 --- a/client/ayon_core/tools/workfiles/control.py +++ b/client/ayon_core/tools/workfiles/control.py @@ -659,16 +659,7 @@ class BaseWorkfileController( folder_id != self.get_current_folder_id() or task_name != self.get_current_task_name() ): - folder_entity = ayon_api.get_folder_by_id( - event_data["project_name"], - event_data["folder_id"], - ) - task_entity = ayon_api.get_task_by_name( - event_data["project_name"], - event_data["folder_id"], - event_data["task_name"] - ) - change_current_context(folder_entity, task_entity) + self._change_current_context(project_name, folder_id, task_id) self._host_open_workfile(filepath) @@ -710,16 +701,8 @@ class BaseWorkfileController( folder_id != self.get_current_folder_id() or task_name != self.get_current_task_name() ): - folder_entity = ayon_api.get_folder_by_id( - project_name, folder["id"] - ) - task_entity = ayon_api.get_task_by_name( - project_name, folder["id"], task_name - ) - change_current_context( - folder_entity, - task_entity, - template_key=template_key + self._change_current_context( + project_name, folder_id, task_id, template_key ) # Save workfile @@ -744,4 +727,18 @@ class BaseWorkfileController( # Trigger after save events emit_event("workfile.save.after", event_data, source="workfiles.tool") - self.reset() + + def _change_current_context( + self, project_name, folder_id, task_id, template_key=None + ): + # Change current context + folder_entity = self.get_folder_entity(project_name, folder_id) + task_entity = self.get_task_entity(project_name, task_id) + change_current_context( + folder_entity, + task_entity, + template_key=template_key + ) + self._current_folder_id = folder_entity["id"] + self._current_folder_path = folder_entity["path"] + self._current_task_name = task_entity["name"] diff --git a/client/ayon_core/version.py b/client/ayon_core/version.py index f3ad9713d5..a60de0493a 100644 --- a/client/ayon_core/version.py +++ b/client/ayon_core/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring AYON core addon version.""" -__version__ = "0.3.0-dev.1" +__version__ = "0.3.1-dev.1" diff --git 
a/client/ayon_core/widgets/__init__.py b/client/ayon_core/widgets/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/client/ayon_core/widgets/password_dialog.py b/client/ayon_core/widgets/password_dialog.py deleted file mode 100644 index a4c50128ff..0000000000 --- a/client/ayon_core/widgets/password_dialog.py +++ /dev/null @@ -1,33 +0,0 @@ -# TODO remove - kept for kitsu addon which imported it -from qtpy import QtWidgets, QtCore, QtGui - - -class PressHoverButton(QtWidgets.QPushButton): - """ - Deprecated: - Use `openpype.tools.utils.PressHoverButton` instead. - """ - _mouse_pressed = False - _mouse_hovered = False - change_state = QtCore.Signal(bool) - - def mousePressEvent(self, event): - self._mouse_pressed = True - self._mouse_hovered = True - self.change_state.emit(self._mouse_hovered) - super(PressHoverButton, self).mousePressEvent(event) - - def mouseReleaseEvent(self, event): - self._mouse_pressed = False - self._mouse_hovered = False - self.change_state.emit(self._mouse_hovered) - super(PressHoverButton, self).mouseReleaseEvent(event) - - def mouseMoveEvent(self, event): - mouse_pos = self.mapFromGlobal(QtGui.QCursor.pos()) - under_mouse = self.rect().contains(mouse_pos) - if under_mouse != self._mouse_hovered: - self._mouse_hovered = under_mouse - self.change_state.emit(self._mouse_hovered) - - super(PressHoverButton, self).mouseMoveEvent(event) diff --git a/client/pyproject.toml b/client/pyproject.toml index 7b4329a31a..1a0ad7e5f2 100644 --- a/client/pyproject.toml +++ b/client/pyproject.toml @@ -4,19 +4,19 @@ description="AYON core addon." [tool.poetry.dependencies] python = ">=3.9.1,<3.10" -aiohttp_json_rpc = "*" # TVPaint server -aiohttp-middlewares = "^2.0.0" -wsrpc_aiohttp = "^3.1.1" # websocket server -Click = "^8" clique = "1.6.*" jsonschema = "^2.6.0" pyblish-base = "^1.8.11" -pynput = "^1.7.2" # Timers manager - TODO remove speedcopy = "^2.1" six = "^1.15" qtawesome = "0.7.3" [ayon.runtimeDependencies] +aiohttp_json_rpc = "*" # TVPaint server +aiohttp-middlewares = "^2.0.0" +wsrpc_aiohttp = "^3.1.1" # websocket server +Click = "^8" OpenTimelineIO = "0.14.1" opencolorio = "2.2.1" Pillow = "9.5.0" +pynput = "^1.7.2" # Timers manager - TODO remove diff --git a/package.py b/package.py index 470bbf256b..79450d029f 100644 --- a/package.py +++ b/package.py @@ -1,6 +1,6 @@ name = "core" title = "Core" -version = "0.3.0-dev.1" +version = "0.3.1-dev.1" client_dir = "ayon_core" diff --git a/pyproject.toml b/pyproject.toml index ee124ddc2d..3c9ff4ea0a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ [tool.poetry] name = "ayon-core" -version = "0.3.0" +version = "0.3.1" description = "" authors = ["Ynput Team "] readme = "README.md" @@ -92,7 +92,7 @@ line-ending = "auto" [tool.codespell] # Ignore words that are not in the dictionary. 
-ignore-words-list = "ayon,ynput" +ignore-words-list = "ayon,ynput,parms,parm,hda" skip = "./.*,./package/*,*/vendor/*,*/unreal/integration/*,*/aftereffects/api/extension/js/libs/*" count = true quiet-level = 3 diff --git a/server_addon/applications/server/__init__.py b/server_addon/applications/server/__init__.py index d5c2de3df3..2668589cbe 100644 --- a/server_addon/applications/server/__init__.py +++ b/server_addon/applications/server/__init__.py @@ -3,6 +3,7 @@ import json import copy from ayon_server.addons import BaseServerAddon, AddonLibrary +from ayon_server.entities.core import attribute_library from ayon_server.lib.postgres import Postgres from .version import __version__ @@ -118,9 +119,28 @@ class ApplicationsAddon(BaseServerAddon): ) async def setup(self): - need_restart = await self.create_applications_attribute() + need_restart = await self.create_required_attributes() if need_restart: self.request_server_restart() + await self._update_enums() + + def _get_applications_def(self): + return { + "name": "applications", + "type": "list_of_strings", + "title": "Applications", + "scope": ["project"], + "enum":[], + } + + def _get_tools_def(self): + return { + "name": "tools", + "type": "list_of_strings", + "title": "Tools", + "scope": ["project", "folder", "task"], + "enum":[], + } async def create_applications_attribute(self) -> bool: """Make sure there are required attributes which ftrack addon needs. @@ -129,6 +149,73 @@ class ApplicationsAddon(BaseServerAddon): bool: 'True' if an attribute was created or updated. """ + need_restart = await self.create_required_attributes() + await self._update_enums() + return need_restart + + async def create_required_attributes(self) -> bool: + """Make sure there are required 'applications' and 'tools' attributes. + This only checks for the existence of the attributes, it does not populate + them with any data. When an attribute is added, server needs to be restarted, + while adding enum data to the attribute does not require a restart. + Returns: + bool: 'True' if an attribute was created or updated. 
+ """ + + # keep track of the last attribute position (for adding new attributes) + apps_attribute_data = self._get_applications_def() + tools_attribute_data = self._get_tools_def() + + apps_attrib_name = apps_attribute_data["name"] + tools_attrib_name = tools_attribute_data["name"] + + async with Postgres.acquire() as conn, conn.transaction(): + query = "SELECT BOOL_OR(name = 'applications') AS has_applications, BOOL_OR(name = 'tools') AS has_tools FROM attributes;" + result = (await conn.fetch(query))[0] + + attributes_to_create = {} + if not result["has_applications"]: + attributes_to_create[apps_attrib_name] = { + "scope": apps_attribute_data["scope"], + "data": { + "title": apps_attribute_data["title"], + "type": apps_attribute_data["type"], + "enum": [], + } + } + + if not result["has_tools"]: + attributes_to_create[tools_attrib_name] = { + "scope": tools_attribute_data["scope"], + "data": { + "title": tools_attribute_data["title"], + "type": tools_attribute_data["type"], + "enum": [], + }, + } + + needs_restart = False + # when any of the required attributes are not present, add them + # and return 'True' to indicate that server needs to be restarted + for name, payload in attributes_to_create.items(): + insert_query = "INSERT INTO attributes (name, scope, data, position) VALUES ($1, $2, $3, (SELECT COALESCE(MAX(position), 0) + 1 FROM attributes)) ON CONFLICT DO NOTHING" + await conn.execute( + insert_query, + name, + payload["scope"], + payload["data"], + ) + needs_restart = True + + return needs_restart + + async def _update_enums(self): + """Updates applications and tools enums based on the addon settings. + This method is called when the addon is started (after we are sure that the + 'applications' and 'tools' attributes exist) and when the addon settings are + updated (using on_settings_updated method). 
+ """ + instance = AddonLibrary.getinstance() app_defs = instance.data.get(self.name) all_applications = [] @@ -148,33 +235,32 @@ class ApplicationsAddon(BaseServerAddon): merge_groups(all_applications, app_groups) merge_groups(all_tools, studio_settings["tool_groups"]) - query = "SELECT name, position, scope, data from public.attributes" - apps_attrib_name = "applications" tools_attrib_name = "tools" apps_enum = get_enum_items_from_groups(all_applications) tools_enum = get_enum_items_from_groups(all_tools) + apps_attribute_data = { "type": "list_of_strings", "title": "Applications", - "enum": apps_enum + "enum": apps_enum, } tools_attribute_data = { "type": "list_of_strings", "title": "Tools", - "enum": tools_enum + "enum": tools_enum, } + apps_scope = ["project"] tools_scope = ["project", "folder", "task"] - apps_match_position = None apps_matches = False - tools_match_position = None tools_matches = False - position = 1 - async for row in Postgres.iterate(query): - position += 1 + + async for row in Postgres.iterate( + "SELECT name, position, scope, data from public.attributes" + ): if row["name"] == apps_attrib_name: # Check if scope is matching ftrack addon requirements if ( @@ -182,7 +268,6 @@ class ApplicationsAddon(BaseServerAddon): and row["data"].get("enum") == apps_enum ): apps_matches = True - apps_match_position = row["position"] elif row["name"] == tools_attrib_name: if ( @@ -190,45 +275,41 @@ class ApplicationsAddon(BaseServerAddon): and row["data"].get("enum") == tools_enum ): tools_matches = True - tools_match_position = row["position"] if apps_matches and tools_matches: - return False + return - postgre_query = "\n".join(( - "INSERT INTO public.attributes", - " (name, position, scope, data)", - "VALUES", - " ($1, $2, $3, $4)", - "ON CONFLICT (name)", - "DO UPDATE SET", - " scope = $3,", - " data = $4", - )) if not apps_matches: - # Reuse position from found attribute - if apps_match_position is None: - apps_match_position = position - position += 1 - await Postgres.execute( - postgre_query, - apps_attrib_name, - apps_match_position, + """ + UPDATE attributes SET + scope = $1, + data = $2 + WHERE + name = $3 + """, apps_scope, apps_attribute_data, + apps_attrib_name, ) if not tools_matches: - if tools_match_position is None: - tools_match_position = position - position += 1 - await Postgres.execute( - postgre_query, - tools_attrib_name, - tools_match_position, + """ + UPDATE attributes SET + scope = $1, + data = $2 + WHERE + name = $3 + """, tools_scope, tools_attribute_data, + tools_attrib_name, ) - return True + + # Reset attributes cache on server + await attribute_library.load() + + async def on_settings_changed(self, *args, **kwargs): + _ = args, kwargs + await self._update_enums() diff --git a/server_addon/applications/server/version.py b/server_addon/applications/server/version.py index 9cb17e7976..c11f861afb 100644 --- a/server_addon/applications/server/version.py +++ b/server_addon/applications/server/version.py @@ -1 +1 @@ -__version__ = "0.1.8" +__version__ = "0.1.9" diff --git a/server_addon/blender/server/settings/main.py b/server_addon/blender/server/settings/main.py index aed9b5632d..3cca22ae3b 100644 --- a/server_addon/blender/server/settings/main.py +++ b/server_addon/blender/server/settings/main.py @@ -6,7 +6,7 @@ from ayon_server.settings import ( from .imageio import BlenderImageIOModel from .publish_plugins import ( - PublishPuginsModel, + PublishPluginsModel, DEFAULT_BLENDER_PUBLISH_SETTINGS ) from .render_settings import ( @@ -47,8 +47,8 @@ class 
BlenderSettings(BaseSettingsModel): default_factory=TemplateWorkfileBaseOptions, title="Workfile Builder" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish Plugins" ) diff --git a/server_addon/blender/server/settings/publish_plugins.py b/server_addon/blender/server/settings/publish_plugins.py index c742fdc5bd..e998d7b057 100644 --- a/server_addon/blender/server/settings/publish_plugins.py +++ b/server_addon/blender/server/settings/publish_plugins.py @@ -66,7 +66,7 @@ class ExtractPlayblastModel(BaseSettingsModel): return validate_json_dict(value) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): ValidateCameraZeroKeyframe: ValidatePluginModel = SettingsField( default_factory=ValidatePluginModel, title="Validate Camera Zero Keyframe", diff --git a/server_addon/celaction/server/settings.py b/server_addon/celaction/server/settings.py index 9208948a07..afa9773477 100644 --- a/server_addon/celaction/server/settings.py +++ b/server_addon/celaction/server/settings.py @@ -42,7 +42,7 @@ class WorkfileModel(BaseSettingsModel): ) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectRenderPath: CollectRenderPathModel = SettingsField( default_factory=CollectRenderPathModel, title="Collect Render Path" @@ -57,8 +57,8 @@ class CelActionSettings(BaseSettingsModel): workfile: WorkfileModel = SettingsField( title="Workfile" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish plugins", ) diff --git a/server_addon/flame/server/settings/create_plugins.py b/server_addon/flame/server/settings/create_plugins.py index 44fb8a2e91..2f17ec40c4 100644 --- a/server_addon/flame/server/settings/create_plugins.py +++ b/server_addon/flame/server/settings/create_plugins.py @@ -87,7 +87,7 @@ class CreateShotClipModel(BaseSettingsModel): ) -class CreatePuginsModel(BaseSettingsModel): +class CreatePluginsModel(BaseSettingsModel): CreateShotClip: CreateShotClipModel = SettingsField( default_factory=CreateShotClipModel, title="Create Shot Clip" diff --git a/server_addon/flame/server/settings/main.py b/server_addon/flame/server/settings/main.py index 047f5af287..c838ee9646 100644 --- a/server_addon/flame/server/settings/main.py +++ b/server_addon/flame/server/settings/main.py @@ -1,8 +1,8 @@ from ayon_server.settings import BaseSettingsModel, SettingsField from .imageio import FlameImageIOModel, DEFAULT_IMAGEIO_SETTINGS -from .create_plugins import CreatePuginsModel, DEFAULT_CREATE_SETTINGS -from .publish_plugins import PublishPuginsModel, DEFAULT_PUBLISH_SETTINGS +from .create_plugins import CreatePluginsModel, DEFAULT_CREATE_SETTINGS +from .publish_plugins import PublishPluginsModel, DEFAULT_PUBLISH_SETTINGS from .loader_plugins import LoaderPluginsModel, DEFAULT_LOADER_SETTINGS @@ -11,12 +11,12 @@ class FlameSettings(BaseSettingsModel): default_factory=FlameImageIOModel, title="Color Management (ImageIO)" ) - create: CreatePuginsModel = SettingsField( - default_factory=CreatePuginsModel, + create: CreatePluginsModel = SettingsField( + default_factory=CreatePluginsModel, title="Create plugins" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, 
title="Publish plugins" ) load: LoaderPluginsModel = SettingsField( diff --git a/server_addon/flame/server/settings/publish_plugins.py b/server_addon/flame/server/settings/publish_plugins.py index decb00fcfa..b34083b4e2 100644 --- a/server_addon/flame/server/settings/publish_plugins.py +++ b/server_addon/flame/server/settings/publish_plugins.py @@ -121,7 +121,7 @@ class IntegrateBatchGroupModel(BaseSettingsModel): ) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectTimelineInstances: CollectTimelineInstancesModel = SettingsField( default_factory=CollectTimelineInstancesModel, title="Collect Timeline Instances" diff --git a/server_addon/fusion/server/settings.py b/server_addon/fusion/server/settings.py index a913db16da..f16ae6e3e7 100644 --- a/server_addon/fusion/server/settings.py +++ b/server_addon/fusion/server/settings.py @@ -75,6 +75,12 @@ class HooksModel(BaseSettingsModel): default_factory=HookOptionalModel, title="Install PySide2" ) + FusionLaunchMenuHook: HookOptionalModel = SettingsField( + default_factory=HookOptionalModel, + title="Launch AYON Menu on Fusion Start", + description="Launch the AYON menu on Fusion application startup. " + "This is only supported for Fusion 18+" + ) class CreateSaverModel(CreateSaverPluginModel): @@ -143,6 +149,9 @@ DEFAULT_VALUES = { "hooks": { "InstallPySideToFusion": { "enabled": True + }, + "FusionLaunchMenuHook": { + "enabled": False } }, "create": { diff --git a/server_addon/fusion/server/version.py b/server_addon/fusion/server/version.py index bbab0242f6..1276d0254f 100644 --- a/server_addon/fusion/server/version.py +++ b/server_addon/fusion/server/version.py @@ -1 +1 @@ -__version__ = "0.1.4" +__version__ = "0.1.5" diff --git a/server_addon/hiero/server/settings/loader_plugins.py b/server_addon/hiero/server/settings/loader_plugins.py index b5a81d1ae2..682f9fd2d9 100644 --- a/server_addon/hiero/server/settings/loader_plugins.py +++ b/server_addon/hiero/server/settings/loader_plugins.py @@ -15,7 +15,7 @@ class LoadClipModel(BaseSettingsModel): ) -class LoaderPuginsModel(BaseSettingsModel): +class LoaderPluginsModel(BaseSettingsModel): LoadClip: LoadClipModel = SettingsField( default_factory=LoadClipModel, title="Load Clip" diff --git a/server_addon/hiero/server/settings/main.py b/server_addon/hiero/server/settings/main.py index b170ecafb8..378af6a539 100644 --- a/server_addon/hiero/server/settings/main.py +++ b/server_addon/hiero/server/settings/main.py @@ -9,11 +9,11 @@ from .create_plugins import ( DEFAULT_CREATE_SETTINGS ) from .loader_plugins import ( - LoaderPuginsModel, + LoaderPluginsModel, DEFAULT_LOADER_PLUGINS_SETTINGS ) from .publish_plugins import ( - PublishPuginsModel, + PublishPluginsModel, DEFAULT_PUBLISH_PLUGIN_SETTINGS ) from .scriptsmenu import ( @@ -35,12 +35,12 @@ class HieroSettings(BaseSettingsModel): default_factory=CreatorPluginsSettings, title="Creator Plugins", ) - load: LoaderPuginsModel = SettingsField( - default_factory=LoaderPuginsModel, + load: LoaderPluginsModel = SettingsField( + default_factory=LoaderPluginsModel, title="Loader plugins" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish plugins" ) scriptsmenu: ScriptsmenuSettings = SettingsField( diff --git a/server_addon/hiero/server/settings/publish_plugins.py b/server_addon/hiero/server/settings/publish_plugins.py index c35c61c332..0e43d4ce3a 100644 --- 
a/server_addon/hiero/server/settings/publish_plugins.py +++ b/server_addon/hiero/server/settings/publish_plugins.py @@ -49,7 +49,7 @@ class ExtractReviewCutUpVideoModel(BaseSettingsModel): ) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectInstanceVersion: CollectInstanceVersionModel = SettingsField( default_factory=CollectInstanceVersionModel, title="Collect Instance Version" diff --git a/server_addon/maya/server/settings/publishers.py b/server_addon/maya/server/settings/publishers.py index f1e63f36be..27288053a2 100644 --- a/server_addon/maya/server/settings/publishers.py +++ b/server_addon/maya/server/settings/publishers.py @@ -316,6 +316,12 @@ class ExtractObjModel(BaseSettingsModel): optional: bool = SettingsField(title="Optional") +class ExtractModelModel(BaseSettingsModel): + enabled: bool = SettingsField(title="Enabled") + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") + + class ExtractMayaSceneRawModel(BaseSettingsModel): """Add loaded instances to those published families:""" enabled: bool = SettingsField(title="ExtractMayaSceneRaw") @@ -372,7 +378,9 @@ class ExtractLookModel(BaseSettingsModel): class ExtractGPUCacheModel(BaseSettingsModel): - enabled: bool = True + enabled: bool = SettingsField(title="Enabled") + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") families: list[str] = SettingsField(default_factory=list, title="Families") step: float = SettingsField(1.0, ge=1.0, title="Step") stepSave: int = SettingsField(1, ge=1, title="Step Save") @@ -799,6 +807,10 @@ class PublishersModel(BaseSettingsModel): default_factory=ExtractGPUCacheModel, title="Extract GPU Cache", ) + ExtractModel: ExtractModelModel = SettingsField( + default_factory=ExtractModelModel, + title="Extract Model (Maya Scene)" + ) DEFAULT_SUFFIX_NAMING = { @@ -1341,6 +1353,8 @@ DEFAULT_PUBLISH_SETTINGS = { }, "ExtractGPUCache": { "enabled": False, + "optional": False, + "active": True, "families": [ "model", "animation", @@ -1353,5 +1367,10 @@ DEFAULT_PUBLISH_SETTINGS = { "optimizeAnimationsForMotionBlur": True, "writeMaterials": True, "useBaseTessellation": True + }, + "ExtractModel": { + "enabled": True, + "optional": True, + "active": True, } } diff --git a/server_addon/maya/server/version.py b/server_addon/maya/server/version.py index 71b4bc4ca6..1a4f79a972 100644 --- a/server_addon/maya/server/version.py +++ b/server_addon/maya/server/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring addon version.""" -__version__ = "0.1.12" +__version__ = "0.1.13" diff --git a/server_addon/nuke/server/settings/loader_plugins.py b/server_addon/nuke/server/settings/loader_plugins.py index a5c3315fd4..531ea8d986 100644 --- a/server_addon/nuke/server/settings/loader_plugins.py +++ b/server_addon/nuke/server/settings/loader_plugins.py @@ -42,7 +42,7 @@ class LoadClipModel(BaseSettingsModel): ) -class LoaderPuginsModel(BaseSettingsModel): +class LoaderPluginsModel(BaseSettingsModel): LoadImage: LoadImageModel = SettingsField( default_factory=LoadImageModel, title="Load Image" diff --git a/server_addon/nuke/server/settings/main.py b/server_addon/nuke/server/settings/main.py index 936686d6ce..1fd347cc21 100644 --- a/server_addon/nuke/server/settings/main.py +++ b/server_addon/nuke/server/settings/main.py @@ -28,11 +28,11 @@ from .create_plugins import ( DEFAULT_CREATE_SETTINGS ) from .publish_plugins import ( - PublishPuginsModel, + PublishPluginsModel, 
DEFAULT_PUBLISH_PLUGIN_SETTINGS ) from .loader_plugins import ( - LoaderPuginsModel, + LoaderPluginsModel, DEFAULT_LOADER_PLUGINS_SETTINGS ) from .workfile_builder import ( @@ -75,13 +75,13 @@ class NukeSettings(BaseSettingsModel): title="Creator Plugins", ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish Plugins", ) - load: LoaderPuginsModel = SettingsField( - default_factory=LoaderPuginsModel, + load: LoaderPluginsModel = SettingsField( + default_factory=LoaderPluginsModel, title="Loader Plugins", ) diff --git a/server_addon/nuke/server/settings/publish_plugins.py b/server_addon/nuke/server/settings/publish_plugins.py index 7d9c914fee..d5b05d8715 100644 --- a/server_addon/nuke/server/settings/publish_plugins.py +++ b/server_addon/nuke/server/settings/publish_plugins.py @@ -219,7 +219,7 @@ class IncrementScriptVersionModel(BaseSettingsModel): active: bool = SettingsField(title="Active") -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectInstanceData: CollectInstanceDataModel = SettingsField( title="Collect Instance Version", default_factory=CollectInstanceDataModel, diff --git a/server_addon/resolve/server/settings.py b/server_addon/resolve/server/settings.py index dcdb2f1b27..d9cbb98340 100644 --- a/server_addon/resolve/server/settings.py +++ b/server_addon/resolve/server/settings.py @@ -69,7 +69,7 @@ class CreateShotClipModels(BaseSettingsModel): ) -class CreatorPuginsModel(BaseSettingsModel): +class CreatorPluginsModel(BaseSettingsModel): CreateShotClip: CreateShotClipModels = SettingsField( default_factory=CreateShotClipModels, title="Create Shot Clip" @@ -84,8 +84,8 @@ class ResolveSettings(BaseSettingsModel): default_factory=ResolveImageIOModel, title="Color Management (ImageIO)" ) - create: CreatorPuginsModel = SettingsField( - default_factory=CreatorPuginsModel, + create: CreatorPluginsModel = SettingsField( + default_factory=CreatorPluginsModel, title="Creator plugins", ) diff --git a/server_addon/traypublisher/server/settings/simple_creators.py b/server_addon/traypublisher/server/settings/simple_creators.py index 924eeedd23..6b979bbe52 100644 --- a/server_addon/traypublisher/server/settings/simple_creators.py +++ b/server_addon/traypublisher/server/settings/simple_creators.py @@ -142,6 +142,7 @@ DEFAULT_SIMPLE_CREATORS = [ "extensions": [ ".exr", ".png", + ".dng", ".dpx", ".jpg", ".tiff", @@ -165,6 +166,7 @@ DEFAULT_SIMPLE_CREATORS = [ "extensions": [ ".exr", ".png", + ".dng", ".dpx", ".jpg", ".jpeg", @@ -215,6 +217,7 @@ DEFAULT_SIMPLE_CREATORS = [ ".exr", ".jpg", ".jpeg", + ".dng", ".dpx", ".bmp", ".tif", diff --git a/server_addon/traypublisher/server/version.py b/server_addon/traypublisher/server/version.py index e57ad00718..de699158fd 100644 --- a/server_addon/traypublisher/server/version.py +++ b/server_addon/traypublisher/server/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring addon version.""" -__version__ = "0.1.3" +__version__ = "0.1.4"
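
The scene inventory changes above rename the site-sync gate from `is_sync_server_enabled()` to `is_sitesync_enabled()` and keep the same early-return behavior when the feature is off. A minimal sketch of that gating pattern, using a hypothetical stand-in controller and a plain list of menu labels instead of the real Qt view:

# Sketch only: `FakeController` and `build_context_menu_labels` are illustrative
# stand-ins; the real code lives in SceneInventoryView and its controller.
class FakeController:
    def __init__(self, sitesync_enabled):
        self._sitesync_enabled = sitesync_enabled

    def is_sitesync_enabled(self):
        # Mirrors the renamed controller method the view now queries
        # instead of the old 'is_sync_server_enabled'.
        return self._sitesync_enabled


def build_context_menu_labels(controller, repre_ids):
    labels = ["Remove"]
    if not controller.is_sitesync_enabled():
        # Same early return as '_handle_sitesync' when site sync is disabled.
        return labels
    labels.extend(["---", "Download", "Upload"])
    return labels


print(build_context_menu_labels(FakeController(True), ["repre-1"]))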
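
The workfiles controller change replaces two ad-hoc `ayon_api` lookups with a single `_change_current_context` helper that resolves the folder and task through the controller's own entity getters and refreshes the cached current context. A standalone sketch of that shape, with stub getters standing in for the controller's cached `get_folder_entity` / `get_task_entity`:

# Standalone sketch of the consolidated context switch; the entity getters are
# stubs, and the call to 'change_current_context' is only noted in a comment.
class WorkfilesControllerSketch:
    def __init__(self):
        self._current_folder_id = None
        self._current_folder_path = None
        self._current_task_name = None

    def get_folder_entity(self, project_name, folder_id):
        # Stub: the real controller fetches (and caches) the folder entity.
        return {"id": folder_id, "path": "/assets/characters/hero"}

    def get_task_entity(self, project_name, task_id):
        # Stub: the real controller fetches (and caches) the task entity.
        return {"id": task_id, "name": "modeling"}

    def _change_current_context(
        self, project_name, folder_id, task_id, template_key=None
    ):
        folder_entity = self.get_folder_entity(project_name, folder_id)
        task_entity = self.get_task_entity(project_name, task_id)
        # The real helper calls 'change_current_context(folder_entity,
        # task_entity, template_key=template_key)' here, then caches the
        # resolved values so later comparisons do not need another lookup.
        self._current_folder_id = folder_entity["id"]
        self._current_folder_path = folder_entity["path"]
        self._current_task_name = task_entity["name"]


controller = WorkfilesControllerSketch()
controller._change_current_context("demo_project", "folder-1", "task-1")
print(controller._current_folder_path, controller._current_task_name)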
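
The applications server addon now splits attribute handling in two: `create_required_attributes()` only ensures the `applications` and `tools` rows exist (adding a row requires a server restart), while `_update_enums()` recomputes the enum values from addon settings and rewrites a row only when its scope or enum no longer match. A small in-memory sketch of that decision flow, using plain dicts in place of the Postgres `attributes` table and simplified enum items:

# In-memory sketch; 'attributes_table' stands in for the 'attributes' table and
# the enum items are simplified placeholders, not real application variants.
APPS_DEF = {"name": "applications", "scope": ["project"]}
TOOLS_DEF = {"name": "tools", "scope": ["project", "folder", "task"]}


def create_required_attributes(attributes_table):
    """Insert missing attribute rows; return True when a restart is needed."""
    needs_restart = False
    for definition in (APPS_DEF, TOOLS_DEF):
        if definition["name"] not in attributes_table:
            attributes_table[definition["name"]] = {
                "scope": definition["scope"],
                "data": {"enum": []},
            }
            needs_restart = True
    return needs_restart


def update_enums(attributes_table, apps_enum, tools_enum):
    """Rewrite rows only when scope or enum differ; no restart required."""
    for name, scope, enum in (
        ("applications", APPS_DEF["scope"], apps_enum),
        ("tools", TOOLS_DEF["scope"], tools_enum),
    ):
        row = attributes_table[name]
        if row["scope"] == scope and row["data"].get("enum") == enum:
            # Matches the early 'return' in '_update_enums' when nothing changed.
            continue
        row["scope"] = scope
        row["data"]["enum"] = enum


table = {}
print("restart needed:", create_required_attributes(table))
update_enums(table, [{"value": "maya/2024"}], [{"value": "mtoa/5"}])
print(table["applications"]["data"]["enum"])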
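
The `PublishPuginsModel` to `PublishPluginsModel` renames and the new `enabled` / `optional` / `active` toggles on Maya's `ExtractGPUCache` and `ExtractModel` all follow the same nested settings-model pattern. A rough approximation of that pattern using plain pydantic, assuming `SettingsField` behaves like pydantic's `Field` (the real models use `ayon_server.settings.BaseSettingsModel`):

# Rough pydantic approximation; field names and defaults mirror the diff, but
# the real server models are built on ayon_server.settings, not bare pydantic.
from pydantic import BaseModel, Field


class ExtractGPUCacheModel(BaseModel):
    enabled: bool = Field(False, title="Enabled")
    optional: bool = Field(False, title="Optional")
    active: bool = Field(True, title="Active")
    families: list = Field(default_factory=list, title="Families")


class PublishPluginsModel(BaseModel):
    ExtractGPUCache: ExtractGPUCacheModel = Field(
        default_factory=ExtractGPUCacheModel,
        title="Extract GPU Cache",
    )


print(PublishPluginsModel().ExtractGPUCache.active)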