diff --git a/client/ayon_core/addon/base.py b/client/ayon_core/addon/base.py index 6bac25b8ac..3d028dba07 100644 --- a/client/ayon_core/addon/base.py +++ b/client/ayon_core/addon/base.py @@ -15,7 +15,9 @@ from abc import ABCMeta, abstractmethod import six import appdirs import ayon_api +from semver import VersionInfo +from ayon_core import AYON_CORE_ROOT from ayon_core.lib import Logger, is_dev_mode_enabled from ayon_core.settings import get_studio_settings @@ -45,6 +47,11 @@ IGNORED_HOSTS_IN_AYON = { } IGNORED_MODULES_IN_AYON = set() +# When addon was moved from ayon-core codebase +# - this is used to log the missing addon +MOVED_ADDON_MILESTONE_VERSIONS = { + "applications": VersionInfo(2, 0, 0), +} # Inherit from `object` for Python 2 hosts class _ModuleClass(object): @@ -191,6 +198,45 @@ def _get_ayon_addons_information(bundle_info): return output +def _handle_moved_addons(addon_name, milestone_version, log): + """Log message that addon version is not compatible with current core. + + The function can return a path to the addon client code, but only + if ayon-core is used from code (for development), and even then it + logs a warning. + + Args: + addon_name (str): Addon name. + milestone_version (VersionInfo): Milestone addon version. + log (logging.Logger): Logger object. + + Returns: + Union[str, None]: Addon dir or None. + """ + # Handle addons which were moved out of ayon-core + # - Try to fix it by loading it directly from server addons dir in + # ayon-core repository. But that will work only if ayon-core is + # used from code. + addon_dir = os.path.join( + os.path.dirname(os.path.dirname(AYON_CORE_ROOT)), + "server_addon", + addon_name, + "client", + ) + if not os.path.exists(addon_dir): + log.error(( + "Addon '{}' is not available." + " Please update the applications addon to '{}' or higher." + ).format(addon_name, milestone_version)) + return None + + log.warning(( + "Please update '{}' addon to '{}' or higher." + " Using client code from ayon-core repository." + ).format(addon_name, milestone_version)) + return addon_dir + + def _load_ayon_addons(openpype_modules, modules_key, log): """Load AYON addons based on information from server. 
@@ -248,6 +294,7 @@ def _load_ayon_addons(openpype_modules, modules_key, log): use_dev_path = dev_addon_info.get("enabled", False) addon_dir = None + milestone_version = MOVED_ADDON_MILESTONE_VERSIONS.get(addon_name) if use_dev_path: addon_dir = dev_addon_info["path"] if not addon_dir or not os.path.exists(addon_dir): @@ -256,6 +303,16 @@ def _load_ayon_addons(openpype_modules, modules_key, log): ).format(addon_name, addon_version, addon_dir)) continue + elif ( + milestone_version is not None + and VersionInfo.parse(addon_version) < milestone_version + ): + addon_dir = _handle_moved_addons( + addon_name, milestone_version, log + ) + if not addon_dir: + continue + elif addons_dir_exists: folder_name = "{}_{}".format(addon_name, addon_version) addon_dir = os.path.join(addons_dir, folder_name) @@ -340,9 +397,8 @@ def _load_addons_in_core( ): # Add current directory at first place # - has small differences in import logic - current_dir = os.path.abspath(os.path.dirname(__file__)) - hosts_dir = os.path.join(os.path.dirname(current_dir), "hosts") - modules_dir = os.path.join(os.path.dirname(current_dir), "modules") + hosts_dir = os.path.join(AYON_CORE_ROOT, "hosts") + modules_dir = os.path.join(AYON_CORE_ROOT, "modules") ignored_host_names = set(IGNORED_HOSTS_IN_AYON) ignored_module_dir_filenames = ( diff --git a/client/ayon_core/cli.py b/client/ayon_core/cli.py index 2759b4fccf..bd47dc1aac 100644 --- a/client/ayon_core/cli.py +++ b/client/ayon_core/cli.py @@ -4,6 +4,7 @@ import os import sys import code import traceback +from pathlib import Path import click import acre @@ -11,7 +12,7 @@ import acre from ayon_core import AYON_CORE_ROOT from ayon_core.addon import AddonsManager from ayon_core.settings import get_general_environments -from ayon_core.lib import initialize_ayon_connection +from ayon_core.lib import initialize_ayon_connection, is_running_from_build from .cli_commands import Commands @@ -81,7 +82,7 @@ main_cli.set_alias("addon", "module") @main_cli.command() @click.argument("output_json_path") @click.option("--project", help="Project name", default=None) -@click.option("--asset", help="Asset name", default=None) +@click.option("--asset", help="Folder path", default=None) @click.option("--task", help="Task name", default=None) @click.option("--app", help="Application name", default=None) @click.option( @@ -96,6 +97,10 @@ def extractenvironments(output_json_path, project, asset, task, app, envgroup): environments will be extracted. Context options are "project", "asset", "task", "app" + + Deprecated: + This function is deprecated and will be removed in future. Please use + 'addon applications extractenvironments ...' instead. """ Commands.extractenvironments( output_json_path, project, asset, task, app, envgroup @@ -127,7 +132,7 @@ def publish_report_viewer(): @main_cli.command() @click.argument("output_path") @click.option("--project", help="Define project context") -@click.option("--asset", help="Define asset in project (project must be set)") +@click.option("--folder", help="Define folder in project (project must be set)") @click.option( "--strict", is_flag=True, @@ -136,18 +141,18 @@ def publish_report_viewer(): def contextselection( output_path, project, - asset, + folder, strict ): """Show Qt dialog to select context. - Context is project name, asset name and task name. The result is stored + Context is project name, folder path and task name. The result is stored into json file which path is passed in first argument. 
""" Commands.contextselection( output_path, project, - asset, + folder, strict ) @@ -163,16 +168,27 @@ def run(script): if not script: print("Error: missing path to script file.") + return + + # Remove first argument if it is the same as AYON executable + # - Forward compatibility with future AYON versions. + # - Current AYON launcher keeps the arguments with first argument but + # future versions might remove it. + first_arg = sys.argv[0] + if is_running_from_build(): + comp_path = os.getenv("AYON_EXECUTABLE") else: + comp_path = os.path.join(os.environ["AYON_ROOT"], "start.py") + # Compare paths and remove first argument if it is the same as AYON + if Path(first_arg).resolve() == Path(comp_path).resolve(): + sys.argv.pop(0) - args = sys.argv - args.remove("run") - args.remove(script) - sys.argv = args + # Remove 'run' command from sys.argv + sys.argv.remove("run") - args_string = " ".join(args[1:]) - print(f"... running: {script} {args_string}") - runpy.run_path(script, run_name="__main__", ) + args_string = " ".join(sys.argv[1:]) + print(f"... running: {script} {args_string}") + runpy.run_path(script, run_name="__main__") @main_cli.command() diff --git a/client/ayon_core/cli_commands.py b/client/ayon_core/cli_commands.py index fa90571462..0fb18be687 100644 --- a/client/ayon_core/cli_commands.py +++ b/client/ayon_core/cli_commands.py @@ -2,7 +2,7 @@ """Implementation of AYON commands.""" import os import sys -import json +import warnings class Commands: @@ -57,10 +57,7 @@ class Commands: """ from ayon_core.lib import Logger - from ayon_core.lib.applications import ( - get_app_environments_for_context, - LaunchTypes, - ) + from ayon_core.addon import AddonsManager from ayon_core.pipeline import ( install_ayon_plugins, @@ -68,7 +65,6 @@ class Commands: ) # Register target and host - import pyblish.api import pyblish.util if not isinstance(path, str): @@ -99,15 +95,13 @@ class Commands: for plugin_path in publish_paths: pyblish.api.register_plugin_path(plugin_path) - app_full_name = os.getenv("AYON_APP_NAME") - if app_full_name: + applications_addon = manager.get_enabled_addon("applications") + if applications_addon is not None: context = get_global_context() - env = get_app_environments_for_context( + env = applications_addon.get_farm_publish_environment_variables( context["project_name"], context["folder_path"], context["task_name"], - app_full_name, - launch_type=LaunchTypes.farm_publish, ) os.environ.update(env) @@ -149,36 +143,36 @@ class Commands: log.info("Publish finished.") @staticmethod - def extractenvironments(output_json_path, project, asset, task, app, - env_group): + def extractenvironments( + output_json_path, project, asset, task, app, env_group + ): """Produces json file with environment based on project and app. Called by Deadline plugin to propagate environment into render jobs. """ - from ayon_core.lib.applications import ( - get_app_environments_for_context, - LaunchTypes, + from ayon_core.addon import AddonsManager + + warnings.warn( + ( + "Command 'extractenvironments' is deprecated and will be" + " removed in future. Please use " + "'addon applications extractenvironments ...' instead." 
+ ), + DeprecationWarning ) - if all((project, asset, task, app)): - env = get_app_environments_for_context( - project, - asset, - task, - app, - env_group=env_group, - launch_type=LaunchTypes.farm_render + addons_manager = AddonsManager() + applications_addon = addons_manager.get_enabled_addon("applications") + if applications_addon is None: + raise RuntimeError( + "Applications addon is not available or enabled." ) - else: - env = os.environ.copy() - output_dir = os.path.dirname(output_json_path) - if not os.path.exists(output_dir): - os.makedirs(output_dir) - - with open(output_json_path, "w") as file_stream: - json.dump(env, file_stream, indent=4) + # Please ignore the fact this is using private method + applications_addon._cli_extract_environments( + output_json_path, project, asset, task, app, env_group + ) @staticmethod def contextselection(output_path, project_name, folder_path, strict): diff --git a/client/ayon_core/hooks/pre_add_last_workfile_arg.py b/client/ayon_core/hooks/pre_add_last_workfile_arg.py index d11bb106d6..74964e0df9 100644 --- a/client/ayon_core/hooks/pre_add_last_workfile_arg.py +++ b/client/ayon_core/hooks/pre_add_last_workfile_arg.py @@ -1,6 +1,6 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class AddLastWorkfileToLaunchArgs(PreLaunchHook): diff --git a/client/ayon_core/hooks/pre_copy_template_workfile.py b/client/ayon_core/hooks/pre_copy_template_workfile.py index 096ad7dd7e..c884116578 100644 --- a/client/ayon_core/hooks/pre_copy_template_workfile.py +++ b/client/ayon_core/hooks/pre_copy_template_workfile.py @@ -1,7 +1,7 @@ import os import shutil from ayon_core.settings import get_project_settings -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.pipeline.workfile import ( get_custom_workfile_template, get_custom_workfile_template_by_string_context diff --git a/client/ayon_core/hooks/pre_create_extra_workdir_folders.py b/client/ayon_core/hooks/pre_create_extra_workdir_folders.py index 72c6bf2f68..8cbdaa338e 100644 --- a/client/ayon_core/hooks/pre_create_extra_workdir_folders.py +++ b/client/ayon_core/hooks/pre_create_extra_workdir_folders.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.pipeline.workfile import create_workdir_extra_folders diff --git a/client/ayon_core/hooks/pre_global_host_data.py b/client/ayon_core/hooks/pre_global_host_data.py index 27e66450ab..e93b512742 100644 --- a/client/ayon_core/hooks/pre_global_host_data.py +++ b/client/ayon_core/hooks/pre_global_host_data.py @@ -1,7 +1,7 @@ from ayon_api import get_project, get_folder_by_path, get_task_by_name -from ayon_core.lib.applications import ( - PreLaunchHook, +from ayon_applications import PreLaunchHook +from ayon_applications.utils import ( EnvironmentPrepData, prepare_app_environments, prepare_context_environments diff --git a/client/ayon_core/hooks/pre_mac_launch.py b/client/ayon_core/hooks/pre_mac_launch.py index 34680155f1..b234a20310 100644 --- a/client/ayon_core/hooks/pre_mac_launch.py +++ b/client/ayon_core/hooks/pre_mac_launch.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class LaunchWithTerminal(PreLaunchHook): diff --git a/client/ayon_core/hooks/pre_new_console_apps.py 
b/client/ayon_core/hooks/pre_new_console_apps.py index c81b924573..9777d37900 100644 --- a/client/ayon_core/hooks/pre_new_console_apps.py +++ b/client/ayon_core/hooks/pre_new_console_apps.py @@ -1,5 +1,5 @@ import subprocess -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class LaunchNewConsoleApps(PreLaunchHook): diff --git a/client/ayon_core/hooks/pre_ocio_hook.py b/client/ayon_core/hooks/pre_ocio_hook.py index e135a5bb12..0817afec71 100644 --- a/client/ayon_core/hooks/pre_ocio_hook.py +++ b/client/ayon_core/hooks/pre_ocio_hook.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook +from ayon_applications import PreLaunchHook from ayon_core.pipeline.colorspace import get_imageio_config from ayon_core.pipeline.template_data import get_template_data_with_names diff --git a/client/ayon_core/host/dirmap.py b/client/ayon_core/host/dirmap.py index effafb6261..2e24877d28 100644 --- a/client/ayon_core/host/dirmap.py +++ b/client/ayon_core/host/dirmap.py @@ -36,23 +36,23 @@ class HostDirmap(object): host_name, project_name, project_settings=None, - sync_module=None + sitesync_addon=None ): self.host_name = host_name self.project_name = project_name self._project_settings = project_settings - self._sync_module = sync_module + self._sitesync_addon = sitesync_addon # to limit reinit of Modules - self._sync_module_discovered = sync_module is not None + self._sitesync_addon_discovered = sitesync_addon is not None self._log = None @property - def sync_module(self): - if not self._sync_module_discovered: - self._sync_module_discovered = True + def sitesync_addon(self): + if not self._sitesync_addon_discovered: + self._sitesync_addon_discovered = True manager = AddonsManager() - self._sync_module = manager.get("sync_server") - return self._sync_module + self._sitesync_addon = manager.get("sitesync") + return self._sitesync_addon @property def project_settings(self): @@ -158,25 +158,25 @@ class HostDirmap(object): """ project_name = self.project_name - sync_module = self.sync_module + sitesync_addon = self.sitesync_addon mapping = {} if ( - sync_module is None - or not sync_module.enabled - or project_name not in sync_module.get_enabled_projects() + sitesync_addon is None + or not sitesync_addon.enabled + or project_name not in sitesync_addon.get_enabled_projects() ): return mapping - active_site = sync_module.get_local_normalized_site( - sync_module.get_active_site(project_name)) - remote_site = sync_module.get_local_normalized_site( - sync_module.get_remote_site(project_name)) + active_site = sitesync_addon.get_local_normalized_site( + sitesync_addon.get_active_site(project_name)) + remote_site = sitesync_addon.get_local_normalized_site( + sitesync_addon.get_remote_site(project_name)) self.log.debug( "active {} - remote {}".format(active_site, remote_site) ) if active_site == "local" and active_site != remote_site: - sync_settings = sync_module.get_sync_project_setting( + sync_settings = sitesync_addon.get_sync_project_setting( project_name, exclude_locals=False, cached=False) @@ -194,7 +194,7 @@ class HostDirmap(object): self.log.debug("remote overrides {}".format(remote_overrides)) current_platform = platform.system().lower() - remote_provider = sync_module.get_provider_for_site( + remote_provider = sitesync_addon.get_provider_for_site( project_name, remote_site ) # dirmap has sense only with regular disk provider, in the workfile diff --git a/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py 
b/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py index 979d9ff3e5..a37481566e 100644 --- a/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py +++ b/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py @@ -6,10 +6,7 @@ from ayon_core.lib import ( get_ayon_launcher_args, is_using_ayon_console, ) -from ayon_core.lib.applications import ( - PreLaunchHook, - LaunchTypes, -) +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.aftereffects import get_launch_script_path diff --git a/client/ayon_core/hosts/aftereffects/plugins/load/load_background.py b/client/ayon_core/hosts/aftereffects/plugins/load/load_background.py index 7c9bd2fcfd..5685011d5f 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/load/load_background.py +++ b/client/ayon_core/hosts/aftereffects/plugins/load/load_background.py @@ -21,7 +21,7 @@ class BackgroundLoader(api.AfterEffectsLoader): """ label = "Load JSON Background" product_types = {"background"} - representations = ["json"] + representations = {"json"} def load(self, context, name=None, namespace=None, data=None): stub = self.get_stub() diff --git a/client/ayon_core/hosts/aftereffects/plugins/load/load_file.py b/client/ayon_core/hosts/aftereffects/plugins/load/load_file.py index 79e791af7b..4b81201722 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/load/load_file.py +++ b/client/ayon_core/hosts/aftereffects/plugins/load/load_file.py @@ -20,7 +20,7 @@ class FileLoader(api.AfterEffectsLoader): "review", "audio", } - representations = ["*"] + representations = {"*"} def load(self, context, name=None, namespace=None, data=None): stub = self.get_stub() diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py index 4134e9d593..c28042b6ae 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py @@ -41,7 +41,6 @@ class CollectAERender(publish.AbstractCollectRender): def get_instances(self, context): instances = [] - instances_to_remove = [] app_version = CollectAERender.get_stub().get_app_version() app_version = app_version[0:4] @@ -117,7 +116,10 @@ class CollectAERender(publish.AbstractCollectRender): fps=fps, app_version=app_version, publish_attributes=inst.data.get("publish_attributes", {}), - file_names=[item.file_name for item in render_q] + file_names=[item.file_name for item in render_q], + + # The source instance this render instance replaces + source_instance=inst ) comp = compositions_by_id.get(comp_id) @@ -145,10 +147,7 @@ class CollectAERender(publish.AbstractCollectRender): instance.families.remove("review") instances.append(instance) - instances_to_remove.append(inst) - for instance in instances_to_remove: - context.remove(instance) return instances def get_expected_files(self, render_instance): diff --git a/client/ayon_core/hosts/blender/addon.py b/client/ayon_core/hosts/blender/addon.py index b7484de243..6a4b325365 100644 --- a/client/ayon_core/hosts/blender/addon.py +++ b/client/ayon_core/hosts/blender/addon.py @@ -55,8 +55,7 @@ class BlenderAddon(AYONAddon, IHostAddon): ) # Define Qt binding if not defined - if not env.get("QT_PREFERRED_BINDING"): - env["QT_PREFERRED_BINDING"] = "PySide2" + env.pop("QT_PREFERRED_BINDING", None) def get_launch_hook_paths(self, app): if app.host_name != self.host_name: diff --git a/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py 
b/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py index 00b297f998..9041ef7309 100644 --- a/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py +++ b/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py @@ -1,6 +1,6 @@ from pathlib import Path -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class AddPythonScriptToLaunchArgs(PreLaunchHook): diff --git a/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py b/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py index c80a1bd669..87a4f5cfad 100644 --- a/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py +++ b/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py @@ -2,7 +2,7 @@ import os import re import subprocess from platform import system -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class InstallPySideToBlender(PreLaunchHook): @@ -31,7 +31,7 @@ class InstallPySideToBlender(PreLaunchHook): def inner_execute(self): # Get blender's python directory - version_regex = re.compile(r"^[2-4]\.[0-9]+$") + version_regex = re.compile(r"^([2-4])\.[0-9]+$") platform = system().lower() executable = self.launch_context.executable.executable_path @@ -42,7 +42,8 @@ class InstallPySideToBlender(PreLaunchHook): if os.path.basename(executable).lower() != expected_executable: self.log.info(( f"Executable does not lead to {expected_executable} file." - "Can't determine blender's python to check/install PySide2." + "Can't determine blender's python to check/install" + " Qt binding." )) return @@ -73,6 +74,15 @@ class InstallPySideToBlender(PreLaunchHook): return version_subfolder = version_subfolders[0] + before_blender_4 = False + if int(version_regex.match(version_subfolder).group(1)) < 4: + before_blender_4 = True + # Blender 4 has Python 3.11 which does not support 'PySide2' + # QUESTION could we always install PySide6? + qt_binding = "PySide2" if before_blender_4 else "PySide6" + # Use PySide6 6.6.3 because 6.7.0 had a bug + # - 'QTextEdit' can't be added to 'QBoxLayout' + qt_binding_version = None if before_blender_4 else "6.6.3" python_dir = os.path.join(versions_dir, version_subfolder, "python") python_lib = os.path.join(python_dir, "lib") @@ -116,22 +126,41 @@ class InstallPySideToBlender(PreLaunchHook): return # Check if PySide2 is installed and skip if yes - if self.is_pyside_installed(python_executable): + if self.is_pyside_installed(python_executable, qt_binding): self.log.debug("Blender has already installed PySide2.") return # Install PySide2 in blender's python if platform == "windows": - result = self.install_pyside_windows(python_executable) + result = self.install_pyside_windows( + python_executable, + qt_binding, + qt_binding_version, + before_blender_4, + ) else: - result = self.install_pyside(python_executable) + result = self.install_pyside( + python_executable, + qt_binding, + qt_binding_version, + ) if result: - self.log.info("Successfully installed PySide2 module to blender.") + self.log.info( + f"Successfully installed {qt_binding} module to blender." + ) else: - self.log.warning("Failed to install PySide2 module to blender.") + self.log.warning( + f"Failed to install {qt_binding} module to blender." 
+ ) - def install_pyside_windows(self, python_executable): + def install_pyside_windows( + self, + python_executable, + qt_binding, + qt_binding_version, + before_blender_4, + ): """Install PySide2 python module to blender's python. Installation requires administration rights that's why it is required @@ -139,7 +168,6 @@ class InstallPySideToBlender(PreLaunchHook): administration rights. """ try: - import win32api import win32con import win32process import win32event @@ -150,12 +178,37 @@ class InstallPySideToBlender(PreLaunchHook): self.log.warning("Couldn't import \"pywin32\" modules") return + if qt_binding_version: + qt_binding = f"{qt_binding}=={qt_binding_version}" + try: # Parameters # - use "-m pip" as module pip to install PySide2 and argument # "--ignore-installed" is to force install module to blender's # site-packages and make sure it is binary compatible - parameters = "-m pip install --ignore-installed PySide2" + fake_exe = "fake.exe" + site_packages_prefix = os.path.dirname( + os.path.dirname(python_executable) + ) + args = [ + fake_exe, + "-m", + "pip", + "install", + "--ignore-installed", + qt_binding, + ] + if not before_blender_4: + # Define prefix for site package + # Python in blender 4.x is installing packages in AppData and + # not in blender's directory. + args.extend(["--prefix", site_packages_prefix]) + + parameters = ( + subprocess.list2cmdline(args) + .lstrip(fake_exe) + .lstrip(" ") + ) # Execute command and ask for administrator's rights process_info = ShellExecuteEx( @@ -173,20 +226,29 @@ class InstallPySideToBlender(PreLaunchHook): except pywintypes.error: pass - def install_pyside(self, python_executable): - """Install PySide2 python module to blender's python.""" + def install_pyside( + self, + python_executable, + qt_binding, + qt_binding_version, + ): + """Install Qt binding python module to blender's python.""" + if qt_binding_version: + qt_binding = f"{qt_binding}=={qt_binding_version}" try: # Parameters - # - use "-m pip" as module pip to install PySide2 and argument + # - use "-m pip" as module pip to install qt binding and argument # "--ignore-installed" is to force install module to blender's # site-packages and make sure it is binary compatible + # TODO find out if blender 4.x on linux/darwin does install + # qt binding to correct place. args = [ python_executable, "-m", "pip", "install", "--ignore-installed", - "PySide2", + qt_binding, ] process = subprocess.Popen( args, stdout=subprocess.PIPE, universal_newlines=True @@ -203,13 +265,15 @@ class InstallPySideToBlender(PreLaunchHook): except subprocess.SubprocessError: pass - def is_pyside_installed(self, python_executable): + def is_pyside_installed(self, python_executable, qt_binding): """Check if PySide2 module is in blender's pip list. Check that PySide2 is installed directly in blender's site-packages. It is possible that it is installed in user's site-packages but that may be incompatible with blender's python. 
""" + + qt_binding_low = qt_binding.lower() # Get pip list from blender's python executable args = [python_executable, "-m", "pip", "list"] process = subprocess.Popen(args, stdout=subprocess.PIPE) @@ -226,6 +290,6 @@ class InstallPySideToBlender(PreLaunchHook): if not line: continue package_name = line[0:package_len].strip() - if package_name.lower() == "pyside2": + if package_name.lower() == qt_binding_low: return True return False diff --git a/client/ayon_core/hosts/blender/hooks/pre_windows_console.py b/client/ayon_core/hosts/blender/hooks/pre_windows_console.py index e3a8593cd9..47303a7af4 100644 --- a/client/ayon_core/hosts/blender/hooks/pre_windows_console.py +++ b/client/ayon_core/hosts/blender/hooks/pre_windows_console.py @@ -1,5 +1,5 @@ import subprocess -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class BlenderConsoleWindows(PreLaunchHook): diff --git a/client/ayon_core/hosts/blender/plugins/load/import_workfile.py b/client/ayon_core/hosts/blender/plugins/load/import_workfile.py index 3aa73a5143..d2e58c7752 100644 --- a/client/ayon_core/hosts/blender/plugins/load/import_workfile.py +++ b/client/ayon_core/hosts/blender/plugins/load/import_workfile.py @@ -43,7 +43,7 @@ class AppendBlendLoader(plugin.AssetLoader): so you could also use it as a new base. """ - representations = ["blend"] + representations = {"blend"} product_types = {"workfile"} label = "Append Workfile" @@ -68,7 +68,7 @@ class ImportBlendLoader(plugin.AssetLoader): so you could also use it as a new base. """ - representations = ["blend"] + representations = {"blend"} product_types = {"workfile"} label = "Import Workfile" diff --git a/client/ayon_core/hosts/blender/plugins/load/load_abc.py b/client/ayon_core/hosts/blender/plugins/load/load_abc.py index 938ae6106b..c074b5ed13 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_abc.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_abc.py @@ -27,7 +27,7 @@ class CacheModelLoader(plugin.AssetLoader): At least for now it only supports Alembic files. 
""" product_types = {"model", "pointcache", "animation"} - representations = ["abc"] + representations = {"abc"} label = "Load Alembic" icon = "code-fork" diff --git a/client/ayon_core/hosts/blender/plugins/load/load_action.py b/client/ayon_core/hosts/blender/plugins/load/load_action.py index 4161f8bff3..8135df042a 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_action.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_action.py @@ -25,7 +25,7 @@ class BlendActionLoader(plugin.AssetLoader): """ product_types = {"action"} - representations = ["blend"] + representations = {"blend"} label = "Link Action" icon = "code-fork" diff --git a/client/ayon_core/hosts/blender/plugins/load/load_animation.py b/client/ayon_core/hosts/blender/plugins/load/load_animation.py index effb91c48c..c9f3b33a6f 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_animation.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_animation.py @@ -17,7 +17,7 @@ class BlendAnimationLoader(plugin.AssetLoader): """ product_types = {"animation"} - representations = ["blend"] + representations = {"blend"} label = "Link Animation" icon = "code-fork" diff --git a/client/ayon_core/hosts/blender/plugins/load/load_audio.py b/client/ayon_core/hosts/blender/plugins/load/load_audio.py index db83c4bca2..3d2f412e2b 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_audio.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_audio.py @@ -21,7 +21,7 @@ class AudioLoader(plugin.AssetLoader): """Load audio in Blender.""" product_types = {"audio"} - representations = ["wav"] + representations = {"wav"} label = "Load Audio" icon = "volume-up" diff --git a/client/ayon_core/hosts/blender/plugins/load/load_blend.py b/client/ayon_core/hosts/blender/plugins/load/load_blend.py index 1984193a30..f9377d615c 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_blend.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_blend.py @@ -21,7 +21,7 @@ class BlendLoader(plugin.AssetLoader): """Load assets from a .blend file.""" product_types = {"model", "rig", "layout", "camera"} - representations = ["blend"] + representations = {"blend"} label = "Append Blend" icon = "code-fork" diff --git a/client/ayon_core/hosts/blender/plugins/load/load_blendscene.py b/client/ayon_core/hosts/blender/plugins/load/load_blendscene.py index 627941752f..f91d828d83 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_blendscene.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_blendscene.py @@ -19,7 +19,7 @@ class BlendSceneLoader(plugin.AssetLoader): """Load assets from a .blend file.""" product_types = {"blendScene"} - representations = ["blend"] + representations = {"blend"} label = "Append Blend" icon = "code-fork" diff --git a/client/ayon_core/hosts/blender/plugins/load/load_camera_abc.py b/client/ayon_core/hosts/blender/plugins/load/load_camera_abc.py index e677fc3e58..6178578081 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_camera_abc.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_camera_abc.py @@ -24,7 +24,7 @@ class AbcCameraLoader(plugin.AssetLoader): """ product_types = {"camera"} - representations = ["abc"] + representations = {"abc"} label = "Load Camera (ABC)" icon = "code-fork" diff --git a/client/ayon_core/hosts/blender/plugins/load/load_camera_fbx.py b/client/ayon_core/hosts/blender/plugins/load/load_camera_fbx.py index 14d61a6395..a510d42850 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_camera_fbx.py +++ 
b/client/ayon_core/hosts/blender/plugins/load/load_camera_fbx.py @@ -24,7 +24,7 @@ class FbxCameraLoader(plugin.AssetLoader): """ product_types = {"camera"} - representations = ["fbx"] + representations = {"fbx"} label = "Load Camera (FBX)" icon = "code-fork" diff --git a/client/ayon_core/hosts/blender/plugins/load/load_fbx.py b/client/ayon_core/hosts/blender/plugins/load/load_fbx.py index 0042482284..e323d49dea 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_fbx.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_fbx.py @@ -24,7 +24,7 @@ class FbxModelLoader(plugin.AssetLoader): """ product_types = {"model", "rig"} - representations = ["fbx"] + representations = {"fbx"} label = "Load FBX" icon = "code-fork" diff --git a/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py b/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py index 7a3da1882e..d20eaad9fc 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py @@ -27,7 +27,7 @@ class JsonLayoutLoader(plugin.AssetLoader): """Load layout published from Unreal.""" product_types = {"layout"} - representations = ["json"] + representations = {"json"} label = "Load Layout" icon = "code-fork" @@ -167,7 +167,7 @@ class JsonLayoutLoader(plugin.AssetLoader): asset_group.empty_display_type = 'SINGLE_ARROW' avalon_container.objects.link(asset_group) - self._process(libpath, asset, asset_group, None) + self._process(libpath, asset_name, asset_group, None) bpy.context.scene.collection.objects.link(asset_group) diff --git a/client/ayon_core/hosts/blender/plugins/load/load_look.py b/client/ayon_core/hosts/blender/plugins/load/load_look.py index ce677a8471..75401f94ec 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_look.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_look.py @@ -24,7 +24,7 @@ class BlendLookLoader(plugin.AssetLoader): """ product_types = {"look"} - representations = ["json"] + representations = {"json"} label = "Load Look" icon = "code-fork" diff --git a/client/ayon_core/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py b/client/ayon_core/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py index 63b7dc7530..fb16bb7f8d 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py +++ b/client/ayon_core/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py @@ -12,7 +12,7 @@ from ayon_core.pipeline.publish import ( import ayon_core.hosts.blender.api.action -class ValidateMeshNoNegativeScale(pyblish.api.Validator, +class ValidateMeshNoNegativeScale(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure that meshes don't have a negative scale.""" diff --git a/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py b/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py index d94fff8f2b..8350c7b7c8 100644 --- a/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py +++ b/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py @@ -3,7 +3,7 @@ import shutil import winreg import subprocess from ayon_core.lib import get_ayon_launcher_args -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.celaction import CELACTION_ROOT_DIR diff --git a/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py 
b/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py index 54dea15dff..1820569918 100644 --- a/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py +++ b/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py @@ -3,11 +3,11 @@ import sys from pprint import pformat -class CollectCelactionCliKwargs(pyblish.api.Collector): +class CollectCelactionCliKwargs(pyblish.api.ContextPlugin): """ Collects all keyword arguments passed from the terminal """ label = "Collect Celaction Cli Kwargs" - order = pyblish.api.Collector.order - 0.1 + order = pyblish.api.CollectorOrder - 0.1 def process(self, context): args = list(sys.argv[1:]) diff --git a/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py b/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py index 1ff7ad7ccf..77a9435205 100644 --- a/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py +++ b/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py @@ -9,7 +9,7 @@ from ayon_core.lib import ( get_ayon_username, run_subprocess, ) -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts import flame as opflame diff --git a/client/ayon_core/hosts/flame/plugins/load/load_clip.py b/client/ayon_core/hosts/flame/plugins/load/load_clip.py index f528caeb29..40ab9c038b 100644 --- a/client/ayon_core/hosts/flame/plugins/load/load_clip.py +++ b/client/ayon_core/hosts/flame/plugins/load/load_clip.py @@ -18,7 +18,7 @@ class LoadClip(opfapi.ClipLoader): """ product_types = {"render2d", "source", "plate", "render", "review"} - representations = ["*"] + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) ) diff --git a/client/ayon_core/hosts/flame/plugins/load/load_clip_batch.py b/client/ayon_core/hosts/flame/plugins/load/load_clip_batch.py index 9bdd467d63..1b23a8b465 100644 --- a/client/ayon_core/hosts/flame/plugins/load/load_clip_batch.py +++ b/client/ayon_core/hosts/flame/plugins/load/load_clip_batch.py @@ -17,7 +17,7 @@ class LoadClipBatch(opfapi.ClipLoader): """ product_types = {"render2d", "source", "plate", "render", "review"} - representations = ["*"] + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) ) diff --git a/client/ayon_core/hosts/fusion/api/lib.py b/client/ayon_core/hosts/fusion/api/lib.py index e5bf4b5a44..08722463e1 100644 --- a/client/ayon_core/hosts/fusion/api/lib.py +++ b/client/ayon_core/hosts/fusion/api/lib.py @@ -3,9 +3,11 @@ import sys import re import contextlib -from ayon_core.lib import Logger - -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.lib import Logger, BoolDef, UILabelDef +from ayon_core.style import load_stylesheet +from ayon_core.pipeline import registered_host +from ayon_core.pipeline.create import CreateContext +from ayon_core.pipeline.context_tools import get_current_folder_entity self = sys.modules[__name__] self._project = None @@ -52,9 +54,15 @@ def update_frame_range(start, end, comp=None, set_render_range=True, comp.SetAttrs(attrs) -def set_current_context_framerange(): +def set_current_context_framerange(folder_entity=None): """Set Comp's frame range based on current folder.""" - folder_entity = get_current_project_folder() + if folder_entity is None: + folder_entity = get_current_folder_entity( + fields={"attrib.frameStart", + "attrib.frameEnd", + "attrib.handleStart", + 
"attrib.handleEnd"}) + folder_attributes = folder_entity["attrib"] start = folder_attributes["frameStart"] end = folder_attributes["frameEnd"] @@ -65,9 +73,24 @@ def set_current_context_framerange(): handle_end=handle_end) -def set_current_context_resolution(): +def set_current_context_fps(folder_entity=None): + """Set Comp's frame rate (FPS) to based on current asset""" + if folder_entity is None: + folder_entity = get_current_folder_entity(fields={"attrib.fps"}) + + fps = float(folder_entity["attrib"].get("fps", 24.0)) + comp = get_current_comp() + comp.SetPrefs({ + "Comp.FrameFormat.Rate": fps, + }) + + +def set_current_context_resolution(folder_entity=None): """Set Comp's resolution width x height default based on current folder""" - folder_entity = get_current_project_folder() + if folder_entity is None: + folder_entity = get_current_folder_entity( + fields={"attrib.resolutionWidth", "attrib.resolutionHeight"}) + folder_attributes = folder_entity["attrib"] width = folder_attributes["resolutionWidth"] height = folder_attributes["resolutionHeight"] @@ -101,7 +124,7 @@ def validate_comp_prefs(comp=None, force_repair=False): "attrib.resolutionHeight", "attrib.pixelAspect", } - folder_entity = get_current_project_folder(fields=fields) + folder_entity = get_current_folder_entity(fields=fields) folder_path = folder_entity["path"] folder_attributes = folder_entity["attrib"] @@ -158,7 +181,6 @@ def validate_comp_prefs(comp=None, force_repair=False): from . import menu from ayon_core.tools.utils import SimplePopup - from ayon_core.style import load_stylesheet dialog = SimplePopup(parent=menu.menu) dialog.setWindowTitle("Fusion comp has invalid configuration") @@ -285,3 +307,96 @@ def comp_lock_and_undo_chunk( finally: comp.Unlock() comp.EndUndo(keep_undo) + + +def update_content_on_context_change(): + """Update all Creator instances to current asset""" + host = registered_host() + context = host.get_current_context() + + folder_path = context["folder_path"] + task = context["task_name"] + + create_context = CreateContext(host, reset=True) + + for instance in create_context.instances: + instance_folder_path = instance.get("folderPath") + if instance_folder_path and instance_folder_path != folder_path: + instance["folderPath"] = folder_path + instance_task = instance.get("task") + if instance_task and instance_task != task: + instance["task"] = task + + create_context.save_changes() + + +def prompt_reset_context(): + """Prompt the user what context settings to reset. + This prompt is used on saving to a different task to allow the scene to + get matched to the new context. + """ + # TODO: Cleanup this prototyped mess of imports and odd dialog + from ayon_core.tools.attribute_defs.dialog import ( + AttributeDefinitionsDialog + ) + from qtpy import QtCore + + definitions = [ + UILabelDef( + label=( + "You are saving your workfile into a different folder or task." 
+ "\n\n" + "Would you like to update some settings to the new context?\n" + ) + ), + BoolDef( + "fps", + label="FPS", + tooltip="Reset Comp FPS", + default=True + ), + BoolDef( + "frame_range", + label="Frame Range", + tooltip="Reset Comp start and end frame ranges", + default=True + ), + BoolDef( + "resolution", + label="Comp Resolution", + tooltip="Reset Comp resolution", + default=True + ), + BoolDef( + "instances", + label="Publish instances", + tooltip="Update all publish instance's folder and task to match " + "the new folder and task", + default=True + ), + ] + + dialog = AttributeDefinitionsDialog(definitions) + dialog.setWindowFlags( + dialog.windowFlags() | QtCore.Qt.WindowStaysOnTopHint + ) + dialog.setWindowTitle("Saving to different context.") + dialog.setStyleSheet(load_stylesheet()) + if not dialog.exec_(): + return None + + options = dialog.get_values() + folder_entity = get_current_folder_entity() + if options["frame_range"]: + set_current_context_framerange(folder_entity) + + if options["fps"]: + set_current_context_fps(folder_entity) + + if options["resolution"]: + set_current_context_resolution(folder_entity) + + if options["instances"]: + update_content_on_context_change() + + dialog.deleteLater() diff --git a/client/ayon_core/hosts/fusion/api/pipeline.py b/client/ayon_core/hosts/fusion/api/pipeline.py index dfac0640b0..2d1073ec7d 100644 --- a/client/ayon_core/hosts/fusion/api/pipeline.py +++ b/client/ayon_core/hosts/fusion/api/pipeline.py @@ -5,6 +5,7 @@ import os import sys import logging import contextlib +from pathlib import Path import pyblish.api from qtpy import QtCore @@ -28,7 +29,8 @@ from ayon_core.tools.utils import host_tools from .lib import ( get_current_comp, - validate_comp_prefs + validate_comp_prefs, + prompt_reset_context ) log = Logger.get_logger(__name__) @@ -40,6 +42,9 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") +# Track whether the workfile tool is about to save +_about_to_save = False + class FusionLogHandler(logging.Handler): # Keep a reference to fusion's Print function (Remote Object) @@ -103,8 +108,10 @@ class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): # Register events register_event_callback("open", on_after_open) + register_event_callback("workfile.save.before", before_workfile_save) register_event_callback("save", on_save) register_event_callback("new", on_new) + register_event_callback("taskChanged", on_task_changed) # region workfile io api def has_unsaved_changes(self): @@ -168,6 +175,19 @@ def on_save(event): comp = event["sender"] validate_comp_prefs(comp) + # We are now starting the actual save directly + global _about_to_save + _about_to_save = False + + +def on_task_changed(): + global _about_to_save + print(f"Task changed: {_about_to_save}") + # TODO: Only do this if not headless + if _about_to_save: + # Let's prompt the user to update the context settings or not + prompt_reset_context() + def on_after_open(event): comp = event["sender"] @@ -201,6 +221,28 @@ def on_after_open(event): dialog.setStyleSheet(load_stylesheet()) +def before_workfile_save(event): + # Due to Fusion's external python process design we can't really + # detect whether the current Fusion environment matches the one the artists + # expects it to be. 
For example, our pipeline python process might + # have been shut down, and restarted - which will restart it to the + # environment Fusion started with; not necessarily where the artist + # is currently working. + # The `_about_to_save` var is used to detect context changes when + # saving into another asset. If we keep it False it will be ignored + # as a context change. As such, before we change tasks we will only + # consider it if the current filepath is within the currently known + # AYON_WORKDIR. This way we avoid false positives of thinking it's + # saving to another context and instead sometimes just have false negatives + # where we fail to show the "Update on task change" prompt. + comp = get_current_comp() + filepath = comp.GetAttrs()["COMPS_FileName"] + workdir = os.environ.get("AYON_WORKDIR") + if Path(workdir) in Path(filepath).parents: + global _about_to_save + _about_to_save = True + + def ls(): """List containers from active Fusion scene @@ -337,7 +379,6 @@ class FusionEventHandler(QtCore.QObject): >>> handler = FusionEventHandler(parent=window) >>> handler.start() - """ ACTION_IDS = [ "Comp_Save", diff --git a/client/ayon_core/hosts/fusion/api/plugin.py b/client/ayon_core/hosts/fusion/api/plugin.py index 492841f967..efe8269120 100644 --- a/client/ayon_core/hosts/fusion/api/plugin.py +++ b/client/ayon_core/hosts/fusion/api/plugin.py @@ -16,6 +16,12 @@ from ayon_core.pipeline import ( AVALON_INSTANCE_ID, AYON_INSTANCE_ID, ) +from ayon_core.pipeline.workfile import get_workdir +from ayon_api import ( + get_project, + get_folder_by_path, + get_task_by_name +) class GenericCreateSaver(Creator): @@ -125,6 +131,8 @@ class GenericCreateSaver(Creator): product_name = data["productName"] if ( original_product_name != product_name + or tool.GetData("openpype.task") != data["task"] + or tool.GetData("openpype.folderPath") != data["folderPath"] or original_format != data["creator_attributes"]["image_format"] ): self._configure_saver_tool(data, tool, product_name) @@ -145,7 +153,30 @@ class GenericCreateSaver(Creator): folder_path = formatting_data["folderPath"] folder_name = folder_path.rsplit("/", 1)[-1] - workdir = os.path.normpath(os.getenv("AYON_WORKDIR")) + # If the folder path and task do not match the current context then the + # workdir is not just the `AYON_WORKDIR`. 
Hence, we need to actually + # compute the resulting workdir + if ( + data["folderPath"] == self.create_context.get_current_folder_path() + and data["task"] == self.create_context.get_current_task_name() + ): + workdir = os.path.normpath(os.getenv("AYON_WORKDIR")) + else: + # TODO: Optimize this logic + project_name = self.create_context.get_current_project_name() + project_entity = get_project(project_name) + folder_entity = get_folder_by_path(project_name, + data["folderPath"]) + task_entity = get_task_by_name(project_name, + folder_id=folder_entity["id"], + task_name=data["task"]) + workdir = get_workdir( + project_entity=project_entity, + folder_entity=folder_entity, + task_entity=task_entity, + host_name=self.create_context.host_name, + ) + formatting_data.update({ "workdir": workdir, "frame": "0" * frame_padding, diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_launch_menu_hook.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_launch_menu_hook.py new file mode 100644 index 0000000000..e70d4b844e --- /dev/null +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_launch_menu_hook.py @@ -0,0 +1,36 @@ +import os +from ayon_core.lib import PreLaunchHook +from ayon_core.hosts.fusion import FUSION_HOST_DIR + + +class FusionLaunchMenuHook(PreLaunchHook): + """Launch AYON menu on start of Fusion""" + app_groups = ["fusion"] + order = 9 + + def execute(self): + # Prelaunch hook is optional + settings = self.data["project_settings"][self.host_name] + if not settings["hooks"]["FusionLaunchMenuHook"]["enabled"]: + return + + variant = self.application.name + if variant.isnumeric(): + version = int(variant) + if version < 18: + print("Skipping launch of OpenPype menu on Fusion start " + "because Fusion version below 18.0 does not support " + "/execute argument on launch. " + f"Version detected: {version}") + return + else: + print(f"Application variant is not numeric: {variant}. 
" + "Validation for Fusion version 18+ for /execute " + "prelaunch argument skipped.") + + path = os.path.join(FUSION_HOST_DIR, + "deploy", + "MenuScripts", + "launch_menu.py").replace("\\", "/") + script = f"fusion:RunScript('{path}')" + self.launch_context.launch_args.extend(["/execute", script]) diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py index 10b1c9c45d..1064d0a83a 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py @@ -7,7 +7,7 @@ from ayon_core.hosts.fusion import ( FUSION_VERSIONS_DICT, get_fusion_version, ) -from ayon_core.lib.applications import ( +from ayon_applications import ( PreLaunchHook, LaunchTypes, ApplicationLaunchFailed, diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py index 5e97ae3de1..ef084b0483 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import ( +from ayon_applications import ( PreLaunchHook, LaunchTypes, ApplicationLaunchFailed, diff --git a/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py b/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py index a9db39e24e..4678d5bac7 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py @@ -3,7 +3,7 @@ import subprocess import platform import uuid -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class InstallPySideToFusion(PreLaunchHook): @@ -85,7 +85,6 @@ class InstallPySideToFusion(PreLaunchHook): administration rights. 
""" try: - import win32api import win32con import win32process import win32event diff --git a/client/ayon_core/hosts/fusion/plugins/load/actions.py b/client/ayon_core/hosts/fusion/plugins/load/actions.py index 9600479680..95400ea41c 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/actions.py +++ b/client/ayon_core/hosts/fusion/plugins/load/actions.py @@ -17,7 +17,7 @@ class FusionSetFrameRangeLoader(load.LoaderPlugin): "pointcache", "render", } - representations = ["*"] + representations = {"*"} extensions = {"*"} label = "Set frame range" @@ -54,7 +54,7 @@ class FusionSetFrameRangeWithHandlesLoader(load.LoaderPlugin): "pointcache", "render", } - representations = ["*"] + representations = {"*"} label = "Set frame range (with handles)" order = 12 diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_alembic.py b/client/ayon_core/hosts/fusion/plugins/load/load_alembic.py index ae2175964d..312362caca 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_alembic.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_alembic.py @@ -13,7 +13,7 @@ class FusionLoadAlembicMesh(load.LoaderPlugin): """Load Alembic mesh into Fusion""" product_types = {"pointcache", "model"} - representations = ["*"] + representations = {"*"} extensions = {"abc"} label = "Load alembic mesh" diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_fbx.py b/client/ayon_core/hosts/fusion/plugins/load/load_fbx.py index 68b7cdacd1..a84e7e0914 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_fbx.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_fbx.py @@ -13,7 +13,7 @@ class FusionLoadFBXMesh(load.LoaderPlugin): """Load FBX mesh into Fusion""" product_types = {"*"} - representations = ["*"] + representations = {"*"} extensions = { "3ds", "amc", diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_sequence.py b/client/ayon_core/hosts/fusion/plugins/load/load_sequence.py index f0a8233377..7c70b54e48 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_sequence.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_sequence.py @@ -137,7 +137,7 @@ class FusionLoadSequence(load.LoaderPlugin): "image", "online", } - representations = ["*"] + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) ) diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_usd.py b/client/ayon_core/hosts/fusion/plugins/load/load_usd.py index 2f8eeb4c66..309b0c094c 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_usd.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_usd.py @@ -17,7 +17,7 @@ class FusionLoadUSD(load.LoaderPlugin): """ product_types = {"*"} - representations = ["*"] + representations = {"*"} extensions = {"usd", "usda", "usdz"} label = "Load USD" diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_workfile.py b/client/ayon_core/hosts/fusion/plugins/load/load_workfile.py index fd2fa7c08b..818fbcb187 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_workfile.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_workfile.py @@ -15,7 +15,7 @@ class FusionLoadWorkfile(load.LoaderPlugin): """Load the content of a workfile into Fusion""" product_types = {"workfile"} - representations = ["*"] + representations = {"*"} extensions = {"comp"} label = "Load Workfile" diff --git a/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py b/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py index 36102d02cb..7a2844d5db 100644 --- 
a/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py @@ -37,14 +37,13 @@ class CollectFusionRender( aspect_x = comp_frame_format_prefs["AspectX"] aspect_y = comp_frame_format_prefs["AspectY"] - instances = [] - instances_to_remove = [] current_file = context.data["currentFile"] version = context.data["version"] project_entity = context.data["projectEntity"] + instances = [] for inst in context: if not inst.data.get("active", True): continue @@ -91,7 +90,10 @@ class CollectFusionRender( frameStep=1, fps=comp_frame_format_prefs.get("Rate"), app_version=comp.GetApp().Version, - publish_attributes=inst.data.get("publish_attributes", {}) + publish_attributes=inst.data.get("publish_attributes", {}), + + # The source instance this render instance replaces + source_instance=inst ) render_target = inst.data["creator_attributes"]["render_target"] @@ -114,13 +116,7 @@ class CollectFusionRender( # to skip ExtractReview locally instance.families.remove("review") - # add new instance to the list and remove the original - # instance since it is not needed anymore instances.append(instance) - instances_to_remove.append(inst) - - for instance in instances_to_remove: - context.remove(instance) return instances diff --git a/client/ayon_core/hosts/harmony/api/README.md b/client/ayon_core/hosts/harmony/api/README.md index 7ac185638a..b8d1dbc100 100644 --- a/client/ayon_core/hosts/harmony/api/README.md +++ b/client/ayon_core/hosts/harmony/api/README.md @@ -590,7 +590,7 @@ class ImageSequenceLoader(load.LoaderPlugin): "reference", "review", } - representations = ["*"] + representations = {"*"} extensions = {"jpeg", "png", "jpg"} def load(self, context, name=None, namespace=None, data=None): diff --git a/client/ayon_core/hosts/harmony/api/pipeline.py b/client/ayon_core/hosts/harmony/api/pipeline.py index d842ccd414..1e3ea0ba21 100644 --- a/client/ayon_core/hosts/harmony/api/pipeline.py +++ b/client/ayon_core/hosts/harmony/api/pipeline.py @@ -13,7 +13,7 @@ from ayon_core.pipeline import ( AVALON_CONTAINER_ID, ) from ayon_core.pipeline.load import get_outdated_containers -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.hosts.harmony import HARMONY_ADDON_ROOT import ayon_core.hosts.harmony.api as harmony @@ -50,7 +50,7 @@ def get_current_context_settings(): """ - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() folder_attributes = folder_entity["attrib"] fps = folder_attributes.get("fps") diff --git a/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py b/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py index bbad14084a..4d38cd09b3 100644 --- a/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py +++ b/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py @@ -6,10 +6,7 @@ from ayon_core.lib import ( get_ayon_launcher_args, is_using_ayon_console, ) -from ayon_core.lib.applications import ( - PreLaunchHook, - LaunchTypes, -) +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.harmony import get_launch_script_path diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_audio.py b/client/ayon_core/hosts/harmony/plugins/load/load_audio.py index 00f3ac77ec..d23f3ed034 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_audio.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_audio.py @@ -36,7 +36,7 @@ class 
ImportAudioLoader(load.LoaderPlugin): """Import audio.""" product_types = {"shot", "audio"} - representations = ["wav"] + representations = {"wav"} label = "Import Audio" def load(self, context, name=None, namespace=None, data=None): diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_background.py b/client/ayon_core/hosts/harmony/plugins/load/load_background.py index 74bc5a4fd8..dad6ac2f22 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_background.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_background.py @@ -234,7 +234,7 @@ class BackgroundLoader(load.LoaderPlugin): Stores the imported asset in a container named after the asset. """ product_types = {"background"} - representations = ["json"] + representations = {"json"} def load(self, context, name=None, namespace=None, data=None): diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_imagesequence.py b/client/ayon_core/hosts/harmony/plugins/load/load_imagesequence.py index bf4b87a03e..f81ebca9af 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_imagesequence.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_imagesequence.py @@ -28,7 +28,7 @@ class ImageSequenceLoader(load.LoaderPlugin): "reference", "review", } - representations = ["*"] + representations = {"*"} extensions = {"jpeg", "png", "jpg"} def load(self, context, name=None, namespace=None, data=None): diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_palette.py b/client/ayon_core/hosts/harmony/plugins/load/load_palette.py index d5fbeb323b..24f4b4e8d4 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_palette.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_palette.py @@ -12,7 +12,7 @@ class ImportPaletteLoader(load.LoaderPlugin): """Import palettes.""" product_types = {"palette", "harmony.palette"} - representations = ["plt"] + representations = {"plt"} label = "Import Palette" def load(self, context, name=None, namespace=None, data=None): diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_template.py b/client/ayon_core/hosts/harmony/plugins/load/load_template.py index 48064f2b75..96dadb0375 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_template.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_template.py @@ -24,7 +24,7 @@ class TemplateLoader(load.LoaderPlugin): """ product_types = {"template", "workfile"} - representations = ["*"] + representations = {"*"} label = "Load Template" icon = "gift" diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_template_workfile.py b/client/ayon_core/hosts/harmony/plugins/load/load_template_workfile.py index c7132ce373..fa5ffe5105 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_template_workfile.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_template_workfile.py @@ -14,7 +14,7 @@ class ImportTemplateLoader(load.LoaderPlugin): """Import templates.""" product_types = {"harmony.template", "workfile"} - representations = ["*"] + representations = {"*"} label = "Import Template" def load(self, context, name=None, namespace=None, data=None): @@ -61,5 +61,5 @@ class ImportWorkfileLoader(ImportTemplateLoader): """Import workfiles.""" product_types = {"workfile"} - representations = ["zip"] + representations = {"zip"} label = "Import Workfile" diff --git a/client/ayon_core/hosts/hiero/api/lib.py b/client/ayon_core/hosts/hiero/api/lib.py index ecb3460fb4..8682ff7780 100644 --- a/client/ayon_core/hosts/hiero/api/lib.py +++ b/client/ayon_core/hosts/hiero/api/lib.py @@ -248,8 +248,12 @@ def 
get_track_items( # collect all available active sequence track items if not return_list: sequence = get_current_sequence(name=sequence_name) - # get all available tracks from sequence - tracks = list(sequence.audioTracks()) + list(sequence.videoTracks()) + tracks = [] + if sequence is not None: + # get all available tracks from sequence + tracks.extend(sequence.audioTracks()) + tracks.extend(sequence.videoTracks()) + # loop all tracks for track in tracks: if check_locked and track.isLocked(): diff --git a/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py b/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py index 2985a81317..1fc808fdd1 100644 --- a/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py +++ b/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py @@ -137,7 +137,7 @@ class CreateShotClip(phiero.Creator): "value": ["", "main", "bg", "fg", "bg", "animatic"], "type": "QComboBox", - "label": "pRODUCT Name", + "label": "Product Name", "target": "ui", "toolTip": "chose product name pattern, if is selected, name of track layer will be used", # noqa "order": 0}, @@ -159,7 +159,7 @@ class CreateShotClip(phiero.Creator): "type": "QCheckBox", "label": "Include audio", "target": "tag", - "toolTip": "Process productS with corresponding audio", # noqa + "toolTip": "Process products with corresponding audio", # noqa "order": 3}, "sourceResolution": { "value": False, diff --git a/client/ayon_core/hosts/hiero/plugins/load/load_clip.py b/client/ayon_core/hosts/hiero/plugins/load/load_clip.py index 72d7e03a9a..715e8c508e 100644 --- a/client/ayon_core/hosts/hiero/plugins/load/load_clip.py +++ b/client/ayon_core/hosts/hiero/plugins/load/load_clip.py @@ -16,7 +16,7 @@ class LoadClip(phiero.SequenceLoader): """ product_types = {"render2d", "source", "plate", "render", "review"} - representations = ["*"] + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) ) diff --git a/client/ayon_core/hosts/hiero/plugins/load/load_effects.py b/client/ayon_core/hosts/hiero/plugins/load/load_effects.py index fd6b8ed694..92aa2de325 100644 --- a/client/ayon_core/hosts/hiero/plugins/load/load_effects.py +++ b/client/ayon_core/hosts/hiero/plugins/load/load_effects.py @@ -15,7 +15,7 @@ class LoadEffects(load.LoaderPlugin): """Loading colorspace soft effect exported from nukestudio""" product_types = {"effect"} - representations = ["*"] + representations = {"*"} extension = {"json"} label = "Load Effects" diff --git a/client/ayon_core/hosts/hiero/plugins/publish/extract_thumbnail.py b/client/ayon_core/hosts/hiero/plugins/publish/extract_thumbnail.py index bcaf5308d9..3599a830d2 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/extract_thumbnail.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/extract_thumbnail.py @@ -4,12 +4,12 @@ import pyblish.api from ayon_core.pipeline import publish -class ExtractThumnail(publish.Extractor): +class ExtractThumbnail(publish.Extractor): """ - Extractor for track item's tumnails + Extractor for track item's tumbnails """ - label = "Extract Thumnail" + label = "Extract Thumbnail" order = pyblish.api.ExtractorOrder families = ["plate", "take"] hosts = ["hiero"] @@ -48,7 +48,7 @@ class ExtractThumnail(publish.Extractor): self.log.debug( "__ thumb_path: `{}`, frame: `{}`".format(thumbnail, thumb_frame)) - self.log.info("Thumnail was generated to: {}".format(thumb_path)) + self.log.info("Thumbnail was generated to: {}".format(thumb_path)) thumb_representation = { 'files': 
thumb_file, 'stagingDir': staging_dir, diff --git a/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py b/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py index d6fbcd7575..b7a508f0b5 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py @@ -90,20 +90,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if "entity_type" in parent: parent["folder_type"] = parent.pop("entity_type") - asset, asset_name = self._get_folder_data(tag_data) - - product_name = tag_data.get("productName") - if product_name is None: - product_name = tag_data["subset"] + folder_path, folder_name = self._get_folder_data(tag_data) families = [str(f) for f in tag_data["families"]] - # form label - label = "{} -".format(asset) - if asset_name != clip_name: - label += " ({})".format(clip_name) - label += " {}".format(product_name) - # TODO: remove backward compatibility product_name = tag_data.get("productName") if product_name is None: @@ -113,7 +103,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): # backward compatibility: product_name should not be missing if not product_name: self.log.error( - "Product name is not defined for: {}".format(asset)) + "Product name is not defined for: {}".format(folder_path)) # TODO: remove backward compatibility product_type = tag_data.get("productType") @@ -124,15 +114,21 @@ class PrecollectInstances(pyblish.api.ContextPlugin): # backward compatibility: product_type should not be missing if not product_type: self.log.error( - "Product type is not defined for: {}".format(asset)) + "Product type is not defined for: {}".format(folder_path)) + + # form label + label = "{} -".format(folder_path) + if folder_name != clip_name: + label += " ({})".format(clip_name) + label += " {}".format(product_name) data.update({ - "name": "{}_{}".format(asset, product_name), + "name": "{}_{}".format(folder_path, product_name), "label": label, - "folderPath": asset, - "asset_name": asset_name, "productName": product_name, "productType": product_type, + "folderPath": folder_path, + "asset_name": folder_name, "item": track_item, "families": families, "publish": tag_data["publish"], @@ -222,19 +218,19 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if not hierarchy_data: return - asset = data["folderPath"] - asset_name = data["asset_name"] + folder_path = data["folderPath"] + folder_name = data["asset_name"] product_type = "shot" # form label - label = "{} -".format(asset) - if asset_name != clip_name: + label = "{} -".format(folder_path) + if folder_name != clip_name: label += " ({}) ".format(clip_name) label += " {}".format(product_name) data.update({ - "name": "{}_{}".format(asset, product_name), + "name": "{}_{}".format(folder_path, product_name), "label": label, "productName": product_name, "productType": product_type, @@ -281,19 +277,19 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if not self.test_any_audio(item): return - asset = data["folderPath"] + folder_path = data["folderPath"] asset_name = data["asset_name"] product_type = "audio" # form label - label = "{} -".format(asset) + label = "{} -".format(folder_path) if asset_name != clip_name: label += " ({}) ".format(clip_name) label += " {}".format(product_name) data.update({ - "name": "{}_{}".format(asset, product_name), + "name": "{}_{}".format(folder_path, product_name), "label": label, "productName": product_name, "productType": product_type, diff --git 
a/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py b/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py index 8df6cd4261..0b6b34ea6c 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py @@ -17,8 +17,8 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.491 def process(self, context): - asset = context.data["folderPath"] - asset_name = asset.split("/")[-1] + folder_path = context.data["folderPath"] + folder_name = folder_path.split("/")[-1] active_timeline = hiero.ui.activeSequence() project = active_timeline.project() @@ -62,12 +62,12 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): product_type = "workfile" instance_data = { "label": "{} - {}Main".format( - asset, product_type), - "name": "{}_{}".format(asset_name, product_type), - "folderPath": context.data["folderPath"], + folder_path, product_type), + "name": "{}_{}".format(folder_name, product_type), + "folderPath": folder_path, # TODO use 'get_product_name' "productName": "{}{}Main".format( - asset_name, product_type.capitalize() + folder_name, product_type.capitalize() ), "item": project, "productType": product_type, diff --git a/client/ayon_core/hosts/houdini/api/creator_node_shelves.py b/client/ayon_core/hosts/houdini/api/creator_node_shelves.py index 6e48cb375b..72c157f187 100644 --- a/client/ayon_core/hosts/houdini/api/creator_node_shelves.py +++ b/client/ayon_core/hosts/houdini/api/creator_node_shelves.py @@ -91,7 +91,7 @@ def create_interactive(creator_identifier, **kwargs): pane = stateutils.activePane(kwargs) if isinstance(pane, hou.NetworkEditor): pwd = pane.pwd() - project_name = context.get_current_project_name(), + project_name = context.get_current_project_name() folder_path = context.get_current_folder_path() task_name = context.get_current_task_name() folder_entity = ayon_api.get_folder_by_path( diff --git a/client/ayon_core/hosts/houdini/api/lib.py b/client/ayon_core/hosts/houdini/api/lib.py index cb7427e9af..63f51b2423 100644 --- a/client/ayon_core/hosts/houdini/api/lib.py +++ b/client/ayon_core/hosts/houdini/api/lib.py @@ -22,7 +22,7 @@ from ayon_core.pipeline import ( ) from ayon_core.pipeline.create import CreateContext from ayon_core.pipeline.template_data import get_template_data -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.tools.utils import PopupUpdateKeys, SimplePopup from ayon_core.tools.utils.host_tools import get_tool_by_name @@ -39,7 +39,7 @@ def get_folder_fps(folder_entity=None): """Return current folder fps.""" if folder_entity is None: - folder_entity = get_current_project_folder(fields=["attrib.fps"]) + folder_entity = get_current_folder_entity(fields=["attrib.fps"]) return folder_entity["attrib"]["fps"] @@ -243,7 +243,10 @@ def render_rop(ropnode): try: ropnode.render(verbose=verbose, # Allow Deadline to capture completion percentage - output_progress=verbose) + output_progress=verbose, + # Render only this node + # (do not render any of its dependencies) + ignore_inputs=True) except hou.Error as exc: # The hou.Error is not inherited from a Python Exception class, # so we explicitly capture the houdini error, otherwise pyblish @@ -760,7 +763,7 @@ def set_camera_resolution(camera, folder_entity=None): """Apply resolution to camera from folder entity of the publish""" if not folder_entity: - 
folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() resolution = get_resolution_from_folder(folder_entity) @@ -970,7 +973,7 @@ def self_publish(): Firstly, it gets the node and its dependencies. Then, it deactivates all other ROPs - And finaly, it triggers the publishing action. + And finally, it triggers the publishing action. """ result, comment = hou.ui.readInput( @@ -1020,6 +1023,82 @@ def add_self_publish_button(node): node.setParmTemplateGroup(template) +def get_scene_viewer(): + """ + Return an instance of a visible viewport. + + There may be many, some could be closed, and any visible one may be the current tab. + + Returns: + Optional[hou.SceneViewer]: A scene viewer, if any. + """ + panes = hou.ui.paneTabs() + panes = [x for x in panes if x.type() == hou.paneTabType.SceneViewer] + panes = sorted(panes, key=lambda x: x.isCurrentTab()) + if panes: + return panes[-1] + + return None + + +def sceneview_snapshot( + sceneview, + filepath="$HIP/thumbnails/$HIPNAME.$F4.jpg", + frame_start=None, + frame_end=None): + """Take a snapshot of your scene view. + + It takes a snapshot of your scene view for the given frame range. + So, it's capable of generating a snapshot image sequence. + It works in different Houdini contexts, e.g. Objects, Solaris. + + Example: + This is how the function can be used:: + + from ayon_core.hosts.houdini.api import lib + sceneview = hou.ui.paneTabOfType(hou.paneTabType.SceneViewer) + lib.sceneview_snapshot(sceneview) + + Notes: + .png output will render poorly, so use .jpg. + + How it works: + Get the current sceneviewer (may be more than one or hidden) + and screengrab the perspective viewport to a file in the + publish location to be picked up with the publish. + + Credits: + https://www.sidefx.com/forum/topic/42808/?page=1#post-354796 + + Args: + sceneview (hou.SceneViewer): The scene view pane from which you want + to take a snapshot. + filepath (str): Thumbnail filepath. It expects the `$F4` token + when frame_end is bigger than frame_start, otherwise + each frame will override its predecessor. + frame_start (int): the frame at which the snapshot starts + frame_end (int): the frame at which the snapshot ends + """ + + if frame_start is None: + frame_start = hou.frame() + if frame_end is None: + frame_end = frame_start + + if not isinstance(sceneview, hou.SceneViewer): + log.debug("Wrong Input. {} is not of type hou.SceneViewer."
+ .format(sceneview)) + return + viewport = sceneview.curViewport() + + flip_settings = sceneview.flipbookSettings().stash() + flip_settings.frameRange((frame_start, frame_end)) + flip_settings.output(filepath) + flip_settings.outputToMPlay(False) + sceneview.flipbook(viewport, flip_settings) + log.debug("A snapshot of sceneview has been saved to: {}".format(filepath)) + + def update_content_on_context_change(): """Update all Creator instances to current asset""" host = registered_host() @@ -1098,4 +1177,4 @@ def prompt_reset_context(): if options["instances"]: update_content_on_context_change() - dialog.deleteLater() \ No newline at end of file + dialog.deleteLater() diff --git a/client/ayon_core/hosts/houdini/api/pipeline.py b/client/ayon_core/hosts/houdini/api/pipeline.py index b9446933ac..4797cf36a0 100644 --- a/client/ayon_core/hosts/houdini/api/pipeline.py +++ b/client/ayon_core/hosts/houdini/api/pipeline.py @@ -39,7 +39,7 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") # Track whether the workfile tool is about to save -ABOUT_TO_SAVE = False +_about_to_save = False class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): @@ -292,8 +292,8 @@ def ls(): def before_workfile_save(event): - global ABOUT_TO_SAVE - ABOUT_TO_SAVE = True + global _about_to_save + _about_to_save = True def before_save(): @@ -307,18 +307,14 @@ def on_save(): # update houdini vars lib.update_houdini_vars_context_dialog() - nodes = lib.get_id_required_nodes() - for node, new_id in lib.generate_ids(nodes): - lib.set_id(node, new_id, overwrite=False) - # We are now starting the actual save directly - global ABOUT_TO_SAVE - ABOUT_TO_SAVE = False + global _about_to_save + _about_to_save = False def on_task_changed(): - global ABOUT_TO_SAVE - if not IS_HEADLESS and ABOUT_TO_SAVE: + global _about_to_save + if not IS_HEADLESS and _about_to_save: # Let's prompt the user to update the context settings or not lib.prompt_reset_context() diff --git a/client/ayon_core/hosts/houdini/hooks/set_paths.py b/client/ayon_core/hosts/houdini/hooks/set_paths.py index 7eb346cc74..4b89ebe944 100644 --- a/client/ayon_core/hosts/houdini/hooks/set_paths.py +++ b/client/ayon_core/hosts/houdini/hooks/set_paths.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class SetPath(PreLaunchHook): diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py b/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py index b61b4cbd46..0ab5e2794e 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator plugin for creating alembic camera products.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance, CreatorError +from ayon_core.pipeline import CreatorError import hou @@ -23,7 +23,7 @@ class CreateAlembicCamera(plugin.HoudiniCreator): instance = super(CreateAlembicCamera, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) parms = { diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py index 6d992f136a..be5604c01c 100644 --- 
a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py @@ -29,7 +29,7 @@ class CreateArnoldAss(plugin.HoudiniCreator): instance = super(CreateArnoldAss, self).create( product_name, instance_data, - pre_create_data) # type: plugin.CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py index b7c5910a4f..f65b54a452 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py @@ -31,7 +31,7 @@ class CreateArnoldRop(plugin.HoudiniCreator): instance = super(CreateArnoldRop, self).create( product_name, instance_data, - pre_create_data) # type: plugin.CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py b/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py index 92c89c71cb..3749598b1d 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator plugin for creating pointcache bgeo files.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance, CreatorError +from ayon_core.pipeline import CreatorError import hou from ayon_core.lib import EnumDef, BoolDef @@ -25,7 +25,7 @@ class CreateBGEO(plugin.HoudiniCreator): instance = super(CreateBGEO, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_composite.py b/client/ayon_core/hosts/houdini/plugins/create/create_composite.py index a1104e5093..a25faf0e8e 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_composite.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_composite.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator plugin for creating composite sequences.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance, CreatorError +from ayon_core.pipeline import CreatorError import hou @@ -25,7 +25,7 @@ class CreateCompositeSequence(plugin.HoudiniCreator): instance = super(CreateCompositeSequence, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) filepath = "{}{}".format( diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_hda.py b/client/ayon_core/hosts/houdini/plugins/create/create_hda.py index b307293dc8..d399aa5e15 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_hda.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_hda.py @@ -78,7 +78,7 @@ class CreateHDA(plugin.HoudiniCreator): instance = super(CreateHDA, self).create( product_name, instance_data, - pre_create_data) # type: plugin.CreatedInstance + pre_create_data) return instance diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py index 9eb9d80cd3..e91ddbc0ac 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py +++ 
b/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin to create Karma ROP.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance from ayon_core.lib import BoolDef, EnumDef, NumberDef @@ -25,7 +24,7 @@ class CreateKarmaROP(plugin.HoudiniCreator): instance = super(CreateKarmaROP, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py index bb10f3893c..e0cf035c35 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin for creating pointcache alembics.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance from ayon_core.lib import BoolDef @@ -22,7 +21,7 @@ class CreateMantraIFD(plugin.HoudiniCreator): instance = super(CreateMantraIFD, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py index f15f49f463..64ecf428e9 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin to create Mantra ROP.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance from ayon_core.lib import EnumDef, BoolDef @@ -28,7 +27,7 @@ class CreateMantraROP(plugin.HoudiniCreator): instance = super(CreateMantraROP, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_usd.py b/client/ayon_core/hosts/houdini/plugins/create/create_usd.py index e5b1ef34cc..64a0c66b2e 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_usd.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_usd.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin for creating USDs.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance import hou @@ -23,7 +22,7 @@ class CreateUSD(plugin.HoudiniCreator): instance = super(CreateUSD, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py b/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py index 2ff3e2a9e7..13efd08c13 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin for creating USD renders.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance from ayon_core.lib import BoolDef, EnumDef import hou @@ -57,7 +56,7 @@ class CreateUSDRender(plugin.HoudiniCreator): instance = 
super(CreateUSDRender, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py b/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py index 9ac7ebdff7..c34cd2b4b5 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin for creating VDB Caches.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance from ayon_core.lib import BoolDef import hou @@ -26,7 +25,7 @@ class CreateVDBCache(plugin.HoudiniCreator): instance = super(CreateVDBCache, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) file_path = "{}{}".format( diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py index 6b2396bffb..5ed9e848a7 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py @@ -3,7 +3,7 @@ import hou from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance, CreatorError +from ayon_core.pipeline import CreatorError from ayon_core.lib import EnumDef, BoolDef @@ -31,7 +31,7 @@ class CreateVrayROP(plugin.HoudiniCreator): instance = super(CreateVrayROP, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py b/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py index b813f82e2e..4cebd537bb 100644 --- a/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py +++ b/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py @@ -3,7 +3,7 @@ from ayon_core.hosts.houdini.api.lib import ( get_camera_from_container, set_camera_resolution ) -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity class SetCameraResolution(InventoryAction): @@ -19,7 +19,7 @@ class SetCameraResolution(InventoryAction): ) def process(self, containers): - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() for container in containers: node = container["node"] camera = get_camera_from_container(node) diff --git a/client/ayon_core/hosts/houdini/plugins/load/actions.py b/client/ayon_core/hosts/houdini/plugins/load/actions.py index fbd89ab9c2..3e9cc35504 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/actions.py +++ b/client/ayon_core/hosts/houdini/plugins/load/actions.py @@ -15,7 +15,7 @@ class SetFrameRangeLoader(load.LoaderPlugin): "vdbcache", "usd", } - representations = ["abc", "vdb", "usd"] + representations = {"abc", "vdb", "usd"} label = "Set frame range" order = 11 @@ -52,7 +52,7 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): "vdbcache", "usd", } - representations = ["abc", "vdb", "usd"] + representations = {"abc", "vdb", "usd"} label = "Set frame range (with handles)" order = 12 diff --git 
a/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py b/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py index 37657cbdff..1bb9043cd0 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py @@ -11,7 +11,7 @@ class AbcLoader(load.LoaderPlugin): product_types = {"model", "animation", "pointcache", "gpuCache"} label = "Load Alembic" - representations = ["*"] + representations = {"*"} extensions = {"abc"} order = -10 icon = "code-fork" diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_alembic_archive.py b/client/ayon_core/hosts/houdini/plugins/load/load_alembic_archive.py index 39928fd952..a231bd9993 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_alembic_archive.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_alembic_archive.py @@ -11,7 +11,7 @@ class AbcArchiveLoader(load.LoaderPlugin): product_types = {"model", "animation", "pointcache", "gpuCache"} label = "Load Alembic as Archive" - representations = ["*"] + representations = {"*"} extensions = {"abc"} order = -5 icon = "code-fork" diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_ass.py b/client/ayon_core/hosts/houdini/plugins/load/load_ass.py index fd0e8f4604..6e0922e305 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_ass.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_ass.py @@ -13,7 +13,7 @@ class AssLoader(load.LoaderPlugin): product_types = {"ass"} label = "Load Arnold Procedural" - representations = ["ass"] + representations = {"ass"} order = -10 icon = "code-fork" color = "orange" diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_bgeo.py b/client/ayon_core/hosts/houdini/plugins/load/load_bgeo.py index fd8071c0de..a318b71963 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_bgeo.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_bgeo.py @@ -14,9 +14,9 @@ class BgeoLoader(load.LoaderPlugin): label = "Load bgeo" product_types = {"model", "pointcache", "bgeo"} - representations = [ + representations = { "bgeo", "bgeosc", "bgeogz", - "bgeo.sc", "bgeo.gz", "bgeo.lzma", "bgeo.bz2"] + "bgeo.sc", "bgeo.gz", "bgeo.lzma", "bgeo.bz2"} order = -10 icon = "code-fork" color = "orange" diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_camera.py b/client/ayon_core/hosts/houdini/plugins/load/load_camera.py index 7cb4542d0c..b7912f88f1 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_camera.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_camera.py @@ -89,7 +89,7 @@ class CameraLoader(load.LoaderPlugin): product_types = {"camera"} label = "Load Camera (abc)" - representations = ["abc"] + representations = {"abc"} order = -10 icon = "code-fork" diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_fbx.py b/client/ayon_core/hosts/houdini/plugins/load/load_fbx.py index a0c5e0c934..398019a3bd 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_fbx.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_fbx.py @@ -17,7 +17,7 @@ class FbxLoader(load.LoaderPlugin): order = -10 product_types = {"*"} - representations = ["*"] + representations = {"*"} extensions = {"fbx"} def load(self, context, name=None, namespace=None, data=None): diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py b/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py index 515ffa6027..d189a027fd 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py +++ 
b/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py @@ -2,7 +2,7 @@ import os import re from ayon_core.pipeline import load -from openpype.hosts.houdini.api import pipeline +from ayon_core.hosts.houdini.api import pipeline import hou @@ -22,7 +22,7 @@ class FilePathLoader(load.LoaderPlugin): icon = "link" color = "white" product_types = {"*"} - representations = ["*"] + representations = {"*"} def load(self, context, name=None, namespace=None, data=None): @@ -103,8 +103,8 @@ class FilePathLoader(load.LoaderPlugin): parm) node.setParmTemplateGroup(parm_template_group) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_hda.py b/client/ayon_core/hosts/houdini/plugins/load/load_hda.py index df77783a34..10fc03be03 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_hda.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_hda.py @@ -12,7 +12,7 @@ class HdaLoader(load.LoaderPlugin): product_types = {"hda"} label = "Load Hda" - representations = ["hda"] + representations = {"hda"} order = -10 icon = "code-fork" color = "orange" diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_image.py b/client/ayon_core/hosts/houdini/plugins/load/load_image.py index 0429b1c3fe..dfbd3c11eb 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_image.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_image.py @@ -54,7 +54,7 @@ class ImageLoader(load.LoaderPlugin): "online", } label = "Load Image (COP2)" - representations = ["*"] + representations = {"*"} order = -10 icon = "code-fork" diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_redshift_proxy.py b/client/ayon_core/hosts/houdini/plugins/load/load_redshift_proxy.py index a6556619fc..f09856a970 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_redshift_proxy.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_redshift_proxy.py @@ -15,7 +15,7 @@ class RedshiftProxyLoader(load.LoaderPlugin): product_types = {"redshiftproxy"} label = "Load Redshift Proxy" - representations = ["rs"] + representations = {"rs"} order = -10 icon = "code-fork" color = "orange" diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_usd_layer.py b/client/ayon_core/hosts/houdini/plugins/load/load_usd_layer.py index 19950e2c98..4e6954c531 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_usd_layer.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_usd_layer.py @@ -14,7 +14,7 @@ class USDSublayerLoader(load.LoaderPlugin): "usdCamera", } label = "Sublayer USD" - representations = ["usd", "usda", "usdlc", "usdnc", "abc"] + representations = {"usd", "usda", "usdlc", "usdnc", "abc"} order = 1 icon = "code-fork" diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_usd_reference.py b/client/ayon_core/hosts/houdini/plugins/load/load_usd_reference.py index 25f98c7c7c..7e82a6abd0 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_usd_reference.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_usd_reference.py @@ -14,7 +14,7 @@ class USDReferenceLoader(load.LoaderPlugin): "usdCamera", } label = "Reference USD" - representations = ["usd", "usda", "usdlc", "usdnc", "abc"] + representations = {"usd", "usda", "usdlc", "usdnc", "abc"} order = -8 icon = "code-fork" diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_usd_sop.py 
b/client/ayon_core/hosts/houdini/plugins/load/load_usd_sop.py index 5b7e022e73..506f6140bf 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_usd_sop.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_usd_sop.py @@ -9,7 +9,7 @@ class SopUsdImportLoader(load.LoaderPlugin): label = "Load USD to SOPs" product_types = {"*"} - representations = ["usd"] + representations = {"usd"} order = -6 icon = "code-fork" color = "orange" diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_vdb.py b/client/ayon_core/hosts/houdini/plugins/load/load_vdb.py index d9808020d7..0008f0d5f8 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_vdb.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_vdb.py @@ -13,7 +13,7 @@ class VdbLoader(load.LoaderPlugin): product_types = {"vdbcache"} label = "Load VDB" - representations = ["vdb"] + representations = {"vdb"} order = -10 icon = "code-fork" color = "orange" diff --git a/client/ayon_core/hosts/houdini/plugins/load/show_usdview.py b/client/ayon_core/hosts/houdini/plugins/load/show_usdview.py index 9506d9dd0c..0158a6b963 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/show_usdview.py +++ b/client/ayon_core/hosts/houdini/plugins/load/show_usdview.py @@ -10,7 +10,7 @@ class ShowInUsdview(load.LoaderPlugin): """Open USD file in usdview""" label = "Show in usdview" - representations = ["*"] + representations = {"*"} product_types = {"*"} extensions = {"usd", "usda", "usdlc", "usdnc", "abc"} order = 15 diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_inputs.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_inputs.py index 7d7fabb315..6cf6bbf430 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_inputs.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_inputs.py @@ -1,9 +1,21 @@ +from collections import deque + import pyblish.api from ayon_core.pipeline import registered_host -def collect_input_containers(nodes): +def get_container_members(container): + node = container["node"] + # Usually the loaded containers don't have any complex references + # and the contained children should be all we need. So we disregard + # checking for .references() on the nodes. + members = set(node.allSubChildren()) + members.add(node) # include the node itself + return members + + +def collect_input_containers(containers, nodes): """Collect containers that contain any of the node in `nodes`. This will return any loaded Avalon container that contains at least one of @@ -11,30 +23,13 @@ def collect_input_containers(nodes): there are member nodes of that container. Returns: - list: Input avalon containers + list: Loaded containers that contain the `nodes` """ - - # Lookup by node ids - lookup = frozenset(nodes) - - containers = [] - host = registered_host() - for container in host.ls(): - - node = container["node"] - - # Usually the loaded containers don't have any complex references - # and the contained children should be all we need. So we disregard - # checking for .references() on the nodes. - members = set(node.allSubChildren()) - members.add(node) # include the node itself - - # If there's an intersection - if not lookup.isdisjoint(members): - containers.append(container) - - return containers + # Assume the containers have collected their cached '_members' data + # in the collector. 
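+    # A container qualifies as an input when at least one of the given +    # nodes is among its cached members.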
+ + return [container for container in containers + if any(node in container["_members"] for node in nodes)] def iter_upstream(node): @@ -54,7 +49,7 @@ ) # Initialize process queue with the node's ancestors itself - queue = list(upstream) + queue = deque(upstream) collected = set(upstream) # Traverse upstream references for all nodes and yield them as we @@ -72,6 +67,10 @@ # Include the references' ancestors that have not been collected yet. for reference in references: + if reference in collected: + # Might have been collected in previous iteration + continue + ancestors = reference.inputAncestors( include_ref_inputs=True, follow_subnets=True ) @@ -108,13 +107,32 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): ) return - # Collect all upstream parents - nodes = list(iter_upstream(output)) - nodes.append(output) + # For large scenes the querying of "host.ls()" can be relatively slow + # e.g. up to a second. Many instances calling it easily slows this + # down. As such, we cache it so we trigger it only once. + # todo: Instead of hidden cache make "CollectContainers" plug-in + cache_key = "__cache_containers" + scene_containers = instance.context.data.get(cache_key, None) + if scene_containers is None: + # Query the scenes' containers if there's no cache yet + host = registered_host() + scene_containers = list(host.ls()) + for container in scene_containers: + # Embed the members into the container dictionary + container_members = set(get_container_members(container)) + container["_members"] = container_members + instance.context.data[cache_key] = scene_containers - # Collect containers for the given set of nodes - containers = collect_input_containers(nodes) + inputs = [] + if scene_containers: + # Collect all upstream parents + nodes = list(iter_upstream(output)) + nodes.append(output) + + # Collect containers for the given set of nodes + containers = collect_input_containers(scene_containers, nodes) + + inputs = [c["representation"] for c in containers] - inputs = [c["representation"] for c in containers] instance.data["inputRepresentations"] = inputs self.log.debug("Collected inputs: %s" % inputs) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/extract_active_view_thumbnail.py b/client/ayon_core/hosts/houdini/plugins/publish/extract_active_view_thumbnail.py new file mode 100644 index 0000000000..aedcb1da02 --- /dev/null +++ b/client/ayon_core/hosts/houdini/plugins/publish/extract_active_view_thumbnail.py @@ -0,0 +1,55 @@ +import pyblish.api +import tempfile +from ayon_core.pipeline import publish +from ayon_core.hosts.houdini.api import lib +from ayon_core.hosts.houdini.api.pipeline import IS_HEADLESS + + +class ExtractActiveViewThumbnail(publish.Extractor): + """Set instance thumbnail to a screengrab of the current active viewport. + + If an instance does not have a thumbnail set yet, it will get a thumbnail + of the currently active view at the time of publishing as a fallback. + + """ + order = pyblish.api.ExtractorOrder + 0.49 + label = "Extract Active View Thumbnail" + families = ["workfile"] + hosts = ["houdini"] + + def process(self, instance): + if IS_HEADLESS: + self.log.debug( + "Skip extraction of active view thumbnail, due to being in " + "headless mode."
+ ) + return + + thumbnail = instance.data.get("thumbnailPath") + if thumbnail: + # A thumbnail was already set for this instance + return + + view_thumbnail = self.get_view_thumbnail(instance) + if not view_thumbnail: + return + self.log.debug("Setting instance thumbnail path to: {}" + .format(view_thumbnail) + ) + instance.data["thumbnailPath"] = view_thumbnail + + def get_view_thumbnail(self, instance): + + sceneview = lib.get_scene_viewer() + if sceneview is None: + self.log.debug("Skipping Extract Active View Thumbnail" + " because no scene view was detected.") + return + + with tempfile.NamedTemporaryFile("w", suffix=".jpg", delete=False) as tmp: + lib.sceneview_snapshot(sceneview, tmp.name) + thumbnail_path = tmp.name + + instance.context.data["cleanupFullPaths"].append(thumbnail_path) + return thumbnail_path diff --git a/client/ayon_core/hosts/houdini/plugins/publish/increment_current_file.py b/client/ayon_core/hosts/houdini/plugins/publish/increment_current_file.py index 73145b211a..fe8fa25f10 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/increment_current_file.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/increment_current_file.py @@ -3,7 +3,6 @@ import pyblish.api from ayon_core.lib import version_up from ayon_core.pipeline import registered_host from ayon_core.pipeline.publish import get_errored_plugins_from_context -from ayon_core.hosts.houdini.api import HoudiniHost from ayon_core.pipeline.publish import KnownPublishError @@ -39,7 +38,7 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin): ) # Filename must not have changed since collecting - host = registered_host() # type: HoudiniHost + host = registered_host() current_file = host.current_file() if context.data["currentFile"] != current_file: raise KnownPublishError( diff --git a/client/ayon_core/hosts/max/api/lib.py b/client/ayon_core/hosts/max/api/lib.py index 5f13856c9b..48bb15f538 100644 --- a/client/ayon_core/hosts/max/api/lib.py +++ b/client/ayon_core/hosts/max/api/lib.py @@ -11,7 +11,7 @@ import ayon_api from ayon_core.pipeline import get_current_project_name, colorspace from ayon_core.settings import get_project_settings from ayon_core.pipeline.context_tools import ( - get_current_project_folder, + get_current_folder_entity, ) from ayon_core.style import load_stylesheet from pymxs import runtime as rt @@ -222,7 +222,7 @@ def reset_scene_resolution(): contains any information regarding scene resolution. 
""" - folder_entity = get_current_project_folder( + folder_entity = get_current_folder_entity( fields={"attrib.resolutionWidth", "attrib.resolutionHeight"} ) folder_attributes = folder_entity["attrib"] @@ -243,7 +243,7 @@ def get_frame_range(folder_entiy=None) -> Union[Dict[str, Any], None]: """ # Set frame start/end if folder_entiy is None: - folder_entiy = get_current_project_folder() + folder_entiy = get_current_folder_entity() folder_attributes = folder_entiy["attrib"] frame_start = folder_attributes.get("frameStart") diff --git a/client/ayon_core/hosts/max/api/lib_rendersettings.py b/client/ayon_core/hosts/max/api/lib_rendersettings.py index 8a9881f032..35b6d064c1 100644 --- a/client/ayon_core/hosts/max/api/lib_rendersettings.py +++ b/client/ayon_core/hosts/max/api/lib_rendersettings.py @@ -3,7 +3,7 @@ from pymxs import runtime as rt from ayon_core.lib import Logger from ayon_core.settings import get_project_settings from ayon_core.pipeline import get_current_project_name -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.hosts.max.api.lib import ( set_render_frame_range, @@ -57,7 +57,7 @@ class RenderSettings(object): if not os.path.exists(output_dir): os.makedirs(output_dir) # hard-coded, should be customized in the setting - folder_attributes = get_current_project_folder()["attrib"] + folder_attributes = get_current_folder_entity()["attrib"] # get project resolution width = folder_attributes.get("resolutionWidth") diff --git a/client/ayon_core/hosts/max/api/pipeline.py b/client/ayon_core/hosts/max/api/pipeline.py index 4b1dcc25d3..675f36c24f 100644 --- a/client/ayon_core/hosts/max/api/pipeline.py +++ b/client/ayon_core/hosts/max/api/pipeline.py @@ -240,10 +240,10 @@ def get_previous_loaded_object(container: str): node_list(list): list of nodes which are previously loaded """ node_list = [] - sel_list = rt.getProperty(container.modifiers[0].openPypeData, "sel_list") - for obj in rt.Objects: - if str(obj) in sel_list: - node_list.append(obj) + node_transform_monitor_list = rt.getProperty( + container.modifiers[0].openPypeData, "all_handles") + for node_transform_monitor in node_transform_monitor_list: + node_list.append(node_transform_monitor.node) return node_list diff --git a/client/ayon_core/hosts/max/hooks/force_startup_script.py b/client/ayon_core/hosts/max/hooks/force_startup_script.py index 8ccd658e8f..417f0049ab 100644 --- a/client/ayon_core/hosts/max/hooks/force_startup_script.py +++ b/client/ayon_core/hosts/max/hooks/force_startup_script.py @@ -2,7 +2,7 @@ """Pre-launch to force 3ds max startup script.""" import os from ayon_core.hosts.max import MAX_HOST_DIR -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class ForceStartupScript(PreLaunchHook): diff --git a/client/ayon_core/hosts/max/hooks/inject_python.py b/client/ayon_core/hosts/max/hooks/inject_python.py index b1b36e75bd..fc9626ab87 100644 --- a/client/ayon_core/hosts/max/hooks/inject_python.py +++ b/client/ayon_core/hosts/max/hooks/inject_python.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Pre-launch hook to inject python environment.""" import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class InjectPythonPath(PreLaunchHook): diff --git a/client/ayon_core/hosts/max/hooks/set_paths.py b/client/ayon_core/hosts/max/hooks/set_paths.py index 
0ee1b0dab7..f066de092e 100644 --- a/client/ayon_core/hosts/max/hooks/set_paths.py +++ b/client/ayon_core/hosts/max/hooks/set_paths.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class SetPath(PreLaunchHook): diff --git a/client/ayon_core/hosts/max/plugins/load/load_camera_fbx.py b/client/ayon_core/hosts/max/plugins/load/load_camera_fbx.py index e1de6b98f9..6f1e9988c5 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_camera_fbx.py +++ b/client/ayon_core/hosts/max/plugins/load/load_camera_fbx.py @@ -19,7 +19,7 @@ class FbxLoader(load.LoaderPlugin): """Fbx Loader.""" product_types = {"camera"} - representations = ["fbx"] + representations = {"fbx"} order = -9 icon = "code-fork" color = "white" diff --git a/client/ayon_core/hosts/max/plugins/load/load_max_scene.py b/client/ayon_core/hosts/max/plugins/load/load_max_scene.py index 1c2c5317cc..4f982dd5ba 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_max_scene.py +++ b/client/ayon_core/hosts/max/plugins/load/load_max_scene.py @@ -78,7 +78,7 @@ class MaxSceneLoader(load.LoaderPlugin): "model", } - representations = ["max"] + representations = {"max"} order = -8 icon = "code-fork" color = "green" diff --git a/client/ayon_core/hosts/max/plugins/load/load_model.py b/client/ayon_core/hosts/max/plugins/load/load_model.py index 00e675d69c..1070fce2bd 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model.py @@ -16,7 +16,7 @@ class ModelAbcLoader(load.LoaderPlugin): product_types = {"model"} label = "Load Model with Alembic" - representations = ["abc"] + representations = {"abc"} order = -10 icon = "code-fork" color = "orange" diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py b/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py index 4b87c60de0..82cad71c3e 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py @@ -18,7 +18,7 @@ class FbxModelLoader(load.LoaderPlugin): """Fbx Model Loader.""" product_types = {"model"} - representations = ["fbx"] + representations = {"fbx"} order = -9 icon = "code-fork" color = "white" diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_obj.py b/client/ayon_core/hosts/max/plugins/load/load_model_obj.py index 2330dbfc24..38f2cdf43c 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_obj.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_obj.py @@ -20,7 +20,7 @@ class ObjLoader(load.LoaderPlugin): """Obj Loader.""" product_types = {"model"} - representations = ["obj"] + representations = {"obj"} order = -9 icon = "code-fork" color = "white" diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_usd.py b/client/ayon_core/hosts/max/plugins/load/load_model_usd.py index bde23e119e..2b946eb2aa 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_usd.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_usd.py @@ -24,7 +24,7 @@ class ModelUSDLoader(load.LoaderPlugin): product_types = {"model"} label = "Load Model(USD)" - representations = ["usda"] + representations = {"usda"} order = -10 icon = "code-fork" color = "orange" diff --git a/client/ayon_core/hosts/max/plugins/load/load_pointcache.py b/client/ayon_core/hosts/max/plugins/load/load_pointcache.py index 7f515ac6a5..0743b3bb34 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_pointcache.py +++ 
b/client/ayon_core/hosts/max/plugins/load/load_pointcache.py @@ -20,7 +20,7 @@ class AbcLoader(load.LoaderPlugin): product_types = {"camera", "animation", "pointcache"} label = "Load Alembic" - representations = ["abc"] + representations = {"abc"} order = -10 icon = "code-fork" color = "orange" diff --git a/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py b/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py index 31d3f02ec0..2efb7c7f62 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py +++ b/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py @@ -23,7 +23,7 @@ class OxAbcLoader(load.LoaderPlugin): product_types = {"camera", "animation", "pointcache"} label = "Load Alembic with Ornatrix" - representations = ["abc"] + representations = {"abc"} order = -10 icon = "code-fork" color = "orange" diff --git a/client/ayon_core/hosts/max/plugins/load/load_pointcloud.py b/client/ayon_core/hosts/max/plugins/load/load_pointcloud.py index c0000c7a79..0e79882fc5 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_pointcloud.py +++ b/client/ayon_core/hosts/max/plugins/load/load_pointcloud.py @@ -18,7 +18,7 @@ class PointCloudLoader(load.LoaderPlugin): """Point Cloud Loader.""" product_types = {"pointcloud"} - representations = ["prt"] + representations = {"prt"} order = -8 icon = "code-fork" color = "green" diff --git a/client/ayon_core/hosts/max/plugins/load/load_redshift_proxy.py b/client/ayon_core/hosts/max/plugins/load/load_redshift_proxy.py index ff6811bd1b..22d42390d9 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_redshift_proxy.py +++ b/client/ayon_core/hosts/max/plugins/load/load_redshift_proxy.py @@ -24,7 +24,7 @@ class RedshiftProxyLoader(load.LoaderPlugin): label = "Load Redshift Proxy" product_types = {"redshiftproxy"} - representations = ["rs"] + representations = {"rs"} order = -9 icon = "code-fork" color = "white" diff --git a/client/ayon_core/hosts/max/plugins/load/load_tycache.py b/client/ayon_core/hosts/max/plugins/load/load_tycache.py index 0244e4e6fc..7a5296d933 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_tycache.py +++ b/client/ayon_core/hosts/max/plugins/load/load_tycache.py @@ -17,7 +17,7 @@ class TyCacheLoader(load.LoaderPlugin): """TyCache Loader.""" product_types = {"tycache"} - representations = ["tyc"] + representations = {"tyc"} order = -8 icon = "code-fork" color = "green" diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py b/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py index 963a601009..cecfd5fd12 100644 --- a/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py +++ b/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py @@ -38,15 +38,15 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin, context_label = "{} > {}".format(*context) instance_label = "{} > {}".format(folderPath, task) message = ( - "Instance '{}' publishes to different context than current " - "context: {}. Current context: {}".format( + "Instance '{}' publishes to different folder or task " + "than current context: {}. 
Current context: {}".format( instance.name, instance_label, context_label ) ) raise PublishValidationError( message=message, description=( - "## Publishing to a different context data\n" + "## Publishing to a different context folder or task\n" "There are publish instances present which are publishing " "into a different folder path or task than your current context.\n\n" "Usually this is not what you want but there can be cases " diff --git a/client/ayon_core/hosts/maya/addon.py b/client/ayon_core/hosts/maya/addon.py index c68aa4c911..1ad0fcf4cf 100644 --- a/client/ayon_core/hosts/maya/addon.py +++ b/client/ayon_core/hosts/maya/addon.py @@ -22,15 +22,15 @@ class MayaAddon(AYONAddon, IHostAddon): if norm_path not in new_python_paths: new_python_paths.append(norm_path) + # add vendor path + new_python_paths.append( + os.path.join(MAYA_ROOT_DIR, "vendor", "python") + ) env["PYTHONPATH"] = os.pathsep.join(new_python_paths) # Set default environments envs = { "AYON_LOG_NO_COLORS": "1", - # For python module 'qtpy' - "QT_API": "PySide2", - # For python module 'Qt' - "QT_PREFERRED_BINDING": "PySide2" } for key, value in envs.items(): env[key] = value diff --git a/client/ayon_core/hosts/maya/api/action.py b/client/ayon_core/hosts/maya/api/action.py index baf558036e..d845ac6066 100644 --- a/client/ayon_core/hosts/maya/api/action.py +++ b/client/ayon_core/hosts/maya/api/action.py @@ -4,7 +4,10 @@ from __future__ import absolute_import import pyblish.api import ayon_api -from ayon_core.pipeline.publish import get_errored_instances_from_context +from ayon_core.pipeline.publish import ( + get_errored_instances_from_context, + get_errored_plugins_from_context +) class GenerateUUIDsOnInvalidAction(pyblish.api.Action): @@ -112,20 +115,25 @@ class SelectInvalidAction(pyblish.api.Action): except ImportError: raise ImportError("Current host is not Maya") - errored_instances = get_errored_instances_from_context(context, - plugin=plugin) - # Get the invalid nodes for the plug-ins self.log.info("Finding invalid nodes..") invalid = list() - for instance in errored_instances: - invalid_nodes = plugin.get_invalid(instance) - if invalid_nodes: - if isinstance(invalid_nodes, (list, tuple)): - invalid.extend(invalid_nodes) - else: - self.log.warning("Plug-in returned to be invalid, " - "but has no selectable nodes.") + if issubclass(plugin, pyblish.api.ContextPlugin): + errored_plugins = get_errored_plugins_from_context(context) + if plugin in errored_plugins: + invalid = plugin.get_invalid(context) + else: + errored_instances = get_errored_instances_from_context( + context, plugin=plugin + ) + for instance in errored_instances: + invalid_nodes = plugin.get_invalid(instance) + if invalid_nodes: + if isinstance(invalid_nodes, (list, tuple)): + invalid.extend(invalid_nodes) + else: + self.log.warning("Plug-in returned to be invalid, " + "but has no selectable nodes.") # Ensure unique (process each node only once) invalid = list(set(invalid)) diff --git a/client/ayon_core/hosts/maya/api/customize.py b/client/ayon_core/hosts/maya/api/customize.py index 4db8819ff5..16255f69ba 100644 --- a/client/ayon_core/hosts/maya/api/customize.py +++ b/client/ayon_core/hosts/maya/api/customize.py @@ -113,7 +113,9 @@ def override_toolbox_ui(): annotation="Look Manager", label="Look Manager", image=os.path.join(icons, "lookmanager.png"), - command=show_look_assigner, + command=lambda: show_look_assigner( + parent=parent_widget + ), width=icon_size, height=icon_size, parent=parent diff --git a/client/ayon_core/hosts/maya/api/fbx.py 
b/client/ayon_core/hosts/maya/api/fbx.py index 97e95d2ec4..939da4011b 100644 --- a/client/ayon_core/hosts/maya/api/fbx.py +++ b/client/ayon_core/hosts/maya/api/fbx.py @@ -2,8 +2,6 @@ """Tools to work with FBX.""" import logging -from pyblish.api import Instance - from maya import cmds # noqa import maya.mel as mel # noqa from ayon_core.hosts.maya.api.lib import maintained_selection @@ -146,7 +144,6 @@ class FBXExtractor: return options def set_options_from_instance(self, instance): - # type: (Instance) -> None """Sets FBX export options from data in the instance. Args: diff --git a/client/ayon_core/hosts/maya/api/lib.py b/client/ayon_core/hosts/maya/api/lib.py index acfac760e6..321bcbc0b5 100644 --- a/client/ayon_core/hosts/maya/api/lib.py +++ b/client/ayon_core/hosts/maya/api/lib.py @@ -37,7 +37,7 @@ from ayon_core.pipeline import ( AYON_CONTAINER_ID, ) from ayon_core.lib import NumberDef -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_task_entity from ayon_core.pipeline.create import CreateContext from ayon_core.lib.profiles_filtering import filter_profiles @@ -1519,24 +1519,30 @@ def extract_alembic(file, # region ID -def get_id_required_nodes(referenced_nodes=False, nodes=None): - """Filter out any node which are locked (reference) or readOnly +def get_id_required_nodes(referenced_nodes=False, + nodes=None, + existing_ids=True): + """Return nodes that should receive a `cbId` attribute. + + This includes only mesh and curve nodes, parent transforms of the shape + nodes, file texture nodes and object sets (including shading engines). + + This filters out any node which is locked, referenced, read-only, + intermediate object. Args: - referenced_nodes (bool): set True to filter out reference nodes + referenced_nodes (bool): set True to include referenced nodes nodes (list, Optional): nodes to consider + existing_ids (bool): set True to include nodes with `cbId` attribute + Returns: nodes (set): list of filtered nodes """ - lookup = None - if nodes is None: - # Consider all nodes - nodes = cmds.ls() - else: - # Build a lookup for the only allowed nodes in output based - # on `nodes` input of the function (+ ensure long names) - lookup = set(cmds.ls(nodes, long=True)) + if nodes is not None and not nodes: + # User supplied an empty `nodes` list to check so all we can + # do is return the empty result + return set() def _node_type_exists(node_type): try: @@ -1545,63 +1551,142 @@ def get_id_required_nodes(referenced_nodes=False, nodes=None): except RuntimeError: return False + def iterate(maya_iterator): + while not maya_iterator.isDone(): + yield maya_iterator.thisNode() + maya_iterator.next() + # `readOnly` flag is obsolete as of Maya 2016 therefore we explicitly # remove default nodes and reference nodes - camera_shapes = ["frontShape", "sideShape", "topShape", "perspShape"] + default_camera_shapes = { + "frontShape", "sideShape", "topShape", "perspShape" + } - ignore = set() - if not referenced_nodes: - ignore |= set(cmds.ls(long=True, referencedNodes=True)) - - # list all defaultNodes to filter out from the rest - ignore |= set(cmds.ls(long=True, defaultNodes=True)) - ignore |= set(cmds.ls(camera_shapes, long=True)) - - # Remove Turtle from the result of `cmds.ls` if Turtle is loaded - # TODO: This should be a less specific check for a single plug-in. 
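# Illustrative usage sketch (not part of the patch): the new `existing_ids`
# flag limits the query to nodes that do not carry a `cbId` attribute yet,
# which is how the save handler later in this patch combines it with the
# existing `generate_ids`/`set_id` helpers.
from ayon_core.hosts.maya.api import lib

def assign_missing_ids():
    # Only nodes without a `cbId` attribute, excluding referenced nodes
    nodes = lib.get_id_required_nodes(referenced_nodes=False,
                                      existing_ids=False)
    for node, new_id in lib.generate_ids(nodes):
        lib.set_id(node, new_id, overwrite=False)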
- if _node_type_exists("ilrBakeLayer"): - ignore |= set(cmds.ls(type="ilrBakeLayer", long=True)) - - # Establish set of nodes types to include - types = ["objectSet", "file", "mesh", "nurbsCurve", "nurbsSurface"] + # The filtered types do not include transforms because we only want the + # parent transforms that have a child shape that we filtered to, so we + # include the parents here + types = ["mesh", "nurbsCurve", "nurbsSurface", "file", "objectSet"] # Check if plugin nodes are available for Maya by checking if the plugin # is loaded if cmds.pluginInfo("pgYetiMaya", query=True, loaded=True): types.append("pgYetiMaya") - # We *always* ignore intermediate shapes, so we filter them out directly - nodes = cmds.ls(nodes, type=types, long=True, noIntermediate=True) + iterator_type = OpenMaya.MIteratorType() + # This tries to be closest matching API equivalents of `types` variable + iterator_type.filterList = [ + OpenMaya.MFn.kMesh, # mesh + OpenMaya.MFn.kNurbsSurface, # nurbsSurface + OpenMaya.MFn.kNurbsCurve, # nurbsCurve + OpenMaya.MFn.kFileTexture, # file + OpenMaya.MFn.kSet, # objectSet + OpenMaya.MFn.kPluginShape # pgYetiMaya + ] + it = OpenMaya.MItDependencyNodes(iterator_type) - # The items which need to pass the id to their parent - # Add the collected transform to the nodes - dag = cmds.ls(nodes, type="dagNode", long=True) # query only dag nodes - transforms = cmds.listRelatives(dag, - parent=True, - fullPath=True) or [] + fn_dep = OpenMaya.MFnDependencyNode() + fn_dag = OpenMaya.MFnDagNode() + result = set() - nodes = set(nodes) - nodes |= set(transforms) + def _should_include_parents(obj): + """Whether to include parents of obj in output""" + if not obj.hasFn(OpenMaya.MFn.kShape): + return False - nodes -= ignore # Remove the ignored nodes - if not nodes: - return nodes + fn_dag.setObject(obj) + if fn_dag.isIntermediateObject: + return False - # Ensure only nodes from the input `nodes` are returned when a - # filter was applied on function call because we also iterated - # to parents and alike - if lookup is not None: - nodes &= lookup + # Skip default cameras + if ( + obj.hasFn(OpenMaya.MFn.kCamera) and + fn_dag.name() in default_camera_shapes + ): + return False - # Avoid locked nodes - nodes_list = list(nodes) - locked = cmds.lockNode(nodes_list, query=True, lock=True) - for node, lock in zip(nodes_list, locked): - if lock: - log.warning("Skipping locked node: %s" % node) - nodes.remove(node) + return True - return nodes + def _add_to_result_if_valid(obj): + """Add to `result` if the object should be included""" + fn_dep.setObject(obj) + if not existing_ids and fn_dep.hasAttribute("cbId"): + return + + if not referenced_nodes and fn_dep.isFromReferencedFile: + return + + if fn_dep.isDefaultNode: + return + + if fn_dep.isLocked: + return + + # Skip default cameras + if ( + obj.hasFn(OpenMaya.MFn.kCamera) and + fn_dep.name() in default_camera_shapes + ): + return + + if obj.hasFn(OpenMaya.MFn.kDagNode): + # DAG nodes + fn_dag.setObject(obj) + + # Skip intermediate objects + if fn_dag.isIntermediateObject: + return + + # DAG nodes can be instanced and thus may have multiple paths. + # We need to identify each path + paths = OpenMaya.MDagPath.getAllPathsTo(obj) + for dag in paths: + path = dag.fullPathName() + result.add(path) + else: + # Dependency node + path = fn_dep.name() + result.add(path) + + for obj in iterate(it): + # For any non-intermediate shape node always include the parent + # even if we exclude the shape itself (e.g. 
when locked, default) + if _should_include_parents(obj): + fn_dag.setObject(obj) + parents = [ + fn_dag.parent(index) for index in range(fn_dag.parentCount()) + ] + for parent_obj in parents: + _add_to_result_if_valid(parent_obj) + + _add_to_result_if_valid(obj) + + if not result: + return result + + # Exclude some additional types + exclude_types = [] + if _node_type_exists("ilrBakeLayer"): + # Remove Turtle from the result if Turtle is loaded + exclude_types.append("ilrBakeLayer") + + if exclude_types: + exclude_nodes = set(cmds.ls(nodes, long=True, type=exclude_types)) + if exclude_nodes: + result -= exclude_nodes + + # Filter to explicit input nodes if provided + if nodes is not None: + # The amount of input nodes to filter to can be large and querying + # many nodes can be slow in Maya. As such we want to try and reduce + # it as much as possible, so we include the type filter to try and + # reduce the result of `maya.cmds.ls` here. + nodes = set(cmds.ls(nodes, long=True, type=types + ["dagNode"])) + if nodes: + result &= nodes + else: + return set() + + return result def get_id(node): @@ -1832,6 +1917,29 @@ def apply_attributes(attributes, nodes_by_id): set_attribute(attr, value, node) +def is_valid_reference_node(reference_node): + """Return whether Maya considers the reference node a valid reference. + + Maya might report an error when using `maya.cmds.referenceQuery`: + Reference node 'reference_node' is not associated with a reference file. + + Note that this does *not* check whether the reference node points to an + existing file. Instead it only returns whether maya considers it valid + and thus is not an unassociated reference node + + Arguments: + reference_node (str): Reference node name + + Returns: + bool: Whether reference node is a valid reference + + """ + sel = OpenMaya.MSelectionList() + sel.add(reference_node) + depend_node = sel.getDependNode(0) + return OpenMaya.MFnReference(depend_node).isValidReference() + + def get_container_members(container): """Returns the members of a container. This includes the nodes from any loaded references in the container. @@ -1857,7 +1965,16 @@ def get_container_members(container): if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"): continue - reference_members = cmds.referenceQuery(ref, nodes=True, dagPath=True) + try: + reference_members = cmds.referenceQuery(ref, + nodes=True, + dagPath=True) + except RuntimeError: + # Ignore reference nodes that are not associated with a + # referenced file on which `referenceQuery` command fails + if not is_valid_reference_node(ref): + continue + raise reference_members = cmds.ls(reference_members, long=True, objectsOnly=True) @@ -1876,18 +1993,9 @@ def list_looks(project_name, folder_id): list[dict[str, Any]]: List of look products. 
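# Illustrative usage sketch (not part of the patch): the new
# `is_valid_reference_node` helper can guard any `referenceQuery` call so
# unassociated reference nodes are skipped instead of raising a RuntimeError,
# mirroring how `get_container_members` above handles them.
from maya import cmds
from ayon_core.hosts.maya.api.lib import is_valid_reference_node

def iter_reference_filenames():
    for ref in cmds.ls(type="reference"):
        if not is_valid_reference_node(ref):
            continue
        yield cmds.referenceQuery(ref, filename=True)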
""" - # # get all products with look leading in - # the name associated with the asset - # TODO this should probably look for product type 'look' instead of - # checking product name that can not start with product type - product_entities = ayon_api.get_products( - project_name, folder_ids=[folder_id] - ) - return [ - product_entity - for product_entity in product_entities - if product_entity["name"].startswith("look") - ] + return list(ayon_api.get_products( + project_name, folder_ids=[folder_id], product_types={"look"} + )) def assign_look_by_version(nodes, version_id): @@ -1906,12 +2014,15 @@ def assign_look_by_version(nodes, version_id): project_name = get_current_project_name() # Get representations of shader file and relationships - look_representation = ayon_api.get_representation_by_name( - project_name, "ma", version_id - ) - json_representation = ayon_api.get_representation_by_name( - project_name, "json", version_id - ) + representations = list(ayon_api.get_representations( + project_name=project_name, + representation_names={"ma", "json"}, + version_ids=[version_id] + )) + look_representation = next( + repre for repre in representations if repre["name"] == "ma") + json_representation = next( + repre for repre in representations if repre["name"] == "json") # See if representation is already loaded, if so reuse it. host = registered_host() @@ -1948,7 +2059,7 @@ def assign_look_by_version(nodes, version_id): apply_shaders(relationships, shader_nodes, nodes) -def assign_look(nodes, product_name="lookDefault"): +def assign_look(nodes, product_name="lookMain"): """Assigns a look to a node. Optimizes the nodes by grouping by folder id and finding @@ -1981,14 +2092,10 @@ def assign_look(nodes, product_name="lookDefault"): product_entity["id"] for product_entity in product_entities_by_folder_id.values() } - last_version_entities = ayon_api.get_last_versions( + last_version_entities_by_product_id = ayon_api.get_last_versions( project_name, product_ids ) - last_version_entities_by_product_id = { - last_version_entity["productId"]: last_version_entity - for last_version_entity in last_version_entities - } for folder_id, asset_nodes in grouped.items(): product_entity = product_entities_by_folder_id.get(folder_id) @@ -2125,22 +2232,6 @@ def get_related_sets(node): """ - # Ignore specific suffices - ignore_suffices = ["out_SET", "controls_SET", "_INST", "_CON"] - - # Default nodes to ignore - defaults = {"defaultLightSet", "defaultObjectSet"} - - # Ids to ignore - ignored = { - AVALON_INSTANCE_ID, - AVALON_CONTAINER_ID, - AYON_INSTANCE_ID, - AYON_CONTAINER_ID, - } - - view_sets = get_isolate_view_sets() - sets = cmds.listSets(object=node, extendToShape=False) if not sets: return [] @@ -2151,6 +2242,14 @@ def get_related_sets(node): # returned by `cmds.listSets(allSets=True)` sets = cmds.ls(sets) + # Ids to ignore + ignored = { + AVALON_INSTANCE_ID, + AVALON_CONTAINER_ID, + AYON_INSTANCE_ID, + AYON_CONTAINER_ID, + } + # Ignore `avalon.container` sets = [ s for s in sets @@ -2159,21 +2258,31 @@ def get_related_sets(node): or cmds.getAttr(f"{s}.id") not in ignored ) ] + if not sets: + return sets # Exclude deformer sets (`type=2` for `maya.cmds.listSets`) - deformer_sets = cmds.listSets(object=node, - extendToShape=False, - type=2) or [] - deformer_sets = set(deformer_sets) # optimize lookup - sets = [s for s in sets if s not in deformer_sets] + exclude_sets = cmds.listSets(object=node, + extendToShape=False, + type=2) or [] + exclude_sets = set(exclude_sets) # optimize lookup + + # Default 
nodes to ignore + exclude_sets.update({"defaultLightSet", "defaultObjectSet"}) + + # Filter out the sets to exclude + sets = [s for s in sets if s not in exclude_sets] # Ignore when the set has a specific suffix - sets = [s for s in sets if not any(s.endswith(x) for x in ignore_suffices)] + ignore_suffices = ("out_SET", "controls_SET", "_INST", "_CON") + sets = [s for s in sets if not s.endswith(ignore_suffices)] + if not sets: + return sets # Ignore viewport filter view sets (from isolate select and # viewports) + view_sets = get_isolate_view_sets() sets = [s for s in sets if s not in view_sets] - sets = [s for s in sets if s not in defaults] return sets @@ -2444,12 +2553,10 @@ def set_scene_fps(fps, update=True): cmds.currentUnit(time=unit, updateAnimation=update) # Set time slider data back to previous state - cmds.playbackOptions(edit=True, minTime=start_frame) - cmds.playbackOptions(edit=True, maxTime=end_frame) - - # Set animation data - cmds.playbackOptions(edit=True, animationStartTime=animation_start) - cmds.playbackOptions(edit=True, animationEndTime=animation_end) + cmds.playbackOptions(minTime=start_frame, + maxTime=end_frame, + animationStartTime=animation_start, + animationEndTime=animation_end) cmds.currentTime(current_frame, edit=True, update=True) @@ -2525,7 +2632,7 @@ def get_fps_for_current_context(): def get_frame_range(include_animation_range=False): - """Get the current folder frame range and handles. + """Get the current task frame range and handles. Args: include_animation_range (bool, optional): Whether to include @@ -2533,25 +2640,34 @@ def get_frame_range(include_animation_range=False): range of the timeline. It is excluded by default. Returns: - dict: Folder's expected frame range values. + dict: Task's expected frame range values. """ # Set frame start/end project_name = get_current_project_name() folder_path = get_current_folder_path() - folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) - folder_attributes = folder_entity["attrib"] + task_name = get_current_task_name() - frame_start = folder_attributes.get("frameStart") - frame_end = folder_attributes.get("frameEnd") + folder_entity = ayon_api.get_folder_by_path( + project_name, + folder_path, + fields={"id"}) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + + task_attributes = task_entity["attrib"] + + frame_start = task_attributes.get("frameStart") + frame_end = task_attributes.get("frameEnd") if frame_start is None or frame_end is None: cmds.warning("No edit information found for '{}'".format(folder_path)) return - handle_start = folder_attributes.get("handleStart") or 0 - handle_end = folder_attributes.get("handleEnd") or 0 + handle_start = task_attributes.get("handleStart") or 0 + handle_end = task_attributes.get("handleEnd") or 0 frame_range = { "frameStart": frame_start, @@ -2565,14 +2681,10 @@ def get_frame_range(include_animation_range=False): # Some usages of this function use the full dictionary to define # instance attributes for which we want to exclude the animation # keys. That is why these are excluded by default. 
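# Illustrative usage sketch (not part of the patch): `get_frame_range` now
# resolves the range from the current *task* entity. Assuming the returned
# dict also carries the handle keys computed above, a caller can rebuild the
# full range like this:
from ayon_core.hosts.maya.api.lib import get_frame_range

frame_range = get_frame_range()
if frame_range:
    start = frame_range["frameStart"] - frame_range["handleStart"]
    end = frame_range["frameEnd"] + frame_range["handleEnd"]
    print("Expected task range: {} - {}".format(start, end))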
- task_name = get_current_task_name() + settings = get_project_settings(project_name) - task_entity = ayon_api.get_task_by_name( - project_name, folder_entity["id"], task_name - ) - task_type = None - if task_entity: - task_type = task_entity["taskType"] + + task_type = task_entity["taskType"] include_handles_settings = settings["maya"]["include_handles"] @@ -2634,48 +2746,131 @@ def reset_frame_range(playback=True, render=True, fps=True): def reset_scene_resolution(): """Apply the scene resolution from the project definition - scene resolution can be overwritten by an folder if the folder.attrib - contains any information regarding scene resolution . + The scene resolution will be retrieved from the current task entity's + attributes. Returns: None """ - folder_attributes = get_current_project_folder()["attrib"] + task_attributes = get_current_task_entity(fields={"attrib"})["attrib"] # Set resolution - width = folder_attributes.get("resolutionWidth", 1920) - height = folder_attributes.get("resolutionHeight", 1080) - pixelAspect = folder_attributes.get("pixelAspect", 1) + width = task_attributes.get("resolutionWidth", 1920) + height = task_attributes.get("resolutionHeight", 1080) + pixel_aspect = task_attributes.get("pixelAspect", 1) - set_scene_resolution(width, height, pixelAspect) + set_scene_resolution(width, height, pixel_aspect) -def set_context_settings(): +def set_context_settings( + fps=True, + resolution=True, + frame_range=True, + colorspace=True +): """Apply the project settings from the project definition - Settings can be overwritten by an folder if the folder.attrib contains + Settings can be overwritten by the task if the task attributes contain any information regarding those settings. - Examples of settings: - fps - resolution - renderer + Args: + fps (bool): Whether to set the scene FPS. + resolution (bool): Whether to set the render resolution. + frame_range (bool): Whether to reset the time slider frame ranges. + colorspace (bool): Whether to reset the colorspace. Returns: None """ - # Set project fps - set_scene_fps(get_fps_for_current_context()) + if fps: + # Set project fps + set_scene_fps(get_fps_for_current_context()) - reset_scene_resolution() + if resolution: + reset_scene_resolution() # Set frame range. - reset_frame_range() + if frame_range: + reset_frame_range(fps=False) # Set colorspace - set_colorspace() + if colorspace: + set_colorspace() + + +def prompt_reset_context(): + """Prompt the user what context settings to reset. + This prompt is used on saving to a different task to allow the scene to + get matched to the new context. + """ + # TODO: Cleanup this prototyped mess of imports and odd dialog + from ayon_core.tools.attribute_defs.dialog import ( + AttributeDefinitionsDialog + ) + from ayon_core.style import load_stylesheet + from ayon_core.lib import BoolDef, UILabelDef + + definitions = [ + UILabelDef( + label=( + "You are saving your workfile into a different folder or task."
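# Illustrative usage sketch (not part of the patch): the new keyword
# arguments allow re-applying only part of the context settings, e.g.
# everything except the frame range, which is what the prompt below does
# based on the user's choices.
from ayon_core.hosts.maya.api.lib import (
    set_context_settings,
    suspended_refresh,
)

with suspended_refresh():
    set_context_settings(fps=True,
                         resolution=True,
                         frame_range=False,
                         colorspace=True)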
+ "\n\n" + "Would you like to update some settings to the new context?\n" + ) + ), + BoolDef( + "fps", + label="FPS", + tooltip="Reset workfile FPS", + default=True + ), + BoolDef( + "frame_range", + label="Frame Range", + tooltip="Reset workfile start and end frame ranges", + default=True + ), + BoolDef( + "resolution", + label="Resolution", + tooltip="Reset workfile resolution", + default=True + ), + BoolDef( + "colorspace", + label="Colorspace", + tooltip="Reset workfile resolution", + default=True + ), + BoolDef( + "instances", + label="Publish instances", + tooltip="Update all publish instance's folder and task to match " + "the new folder and task", + default=True + ), + ] + + dialog = AttributeDefinitionsDialog(definitions) + dialog.setWindowTitle("Saving to different context.") + dialog.setStyleSheet(load_stylesheet()) + if not dialog.exec_(): + return None + + options = dialog.get_values() + with suspended_refresh(): + set_context_settings( + fps=options["fps"], + resolution=options["resolution"], + frame_range=options["frame_range"], + colorspace=options["colorspace"] + ) + if options["instances"]: + update_content_on_context_change() + + dialog.deleteLater() # Valid FPS @@ -3051,7 +3246,7 @@ def load_capture_preset(data): return options -def get_attr_in_layer(attr, layer): +def get_attr_in_layer(attr, layer, as_string=True): """Return attribute value in specified renderlayer. Same as cmds.getAttr but this gets the attribute's value in a @@ -3069,6 +3264,7 @@ def get_attr_in_layer(attr, layer): Args: attr (str): attribute name, ex. "node.attribute" layer (str): layer name + as_string (bool): whether attribute should convert to a string value Returns: The return value from `maya.cmds.getAttr` @@ -3078,7 +3274,8 @@ def get_attr_in_layer(attr, layer): try: if cmds.mayaHasRenderSetup(): from . 
import lib_rendersetup - return lib_rendersetup.get_attr_in_layer(attr, layer) + return lib_rendersetup.get_attr_in_layer( + attr, layer, as_string=as_string) except AttributeError: pass @@ -3086,7 +3283,7 @@ def get_attr_in_layer(attr, layer): current_layer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True) if layer == current_layer: - return cmds.getAttr(attr) + return cmds.getAttr(attr, asString=as_string) connections = cmds.listConnections(attr, plugs=True, @@ -3137,7 +3334,7 @@ def get_attr_in_layer(attr, layer): value *= conversion return value - return cmds.getAttr(attr) + return cmds.getAttr(attr, asString=as_string) def fix_incompatible_containers(): @@ -3166,33 +3363,46 @@ def update_content_on_context_change(): """ This will update scene content to match new folder on context change """ - scene_sets = cmds.listSets(allSets=True) - folder_entity = get_current_project_folder() - folder_attributes = folder_entity["attrib"] - new_folder_path = folder_entity["path"] - for s in scene_sets: - try: - if cmds.getAttr("{}.id".format(s)) in { - AYON_INSTANCE_ID, AVALON_INSTANCE_ID - }: - attr = cmds.listAttr(s) - print(s) - if "folderPath" in attr: - print( - " - setting folder to: [ {} ]".format(new_folder_path) - ) - cmds.setAttr( - "{}.folderPath".format(s), - new_folder_path, type="string" - ) - if "frameStart" in attr: - cmds.setAttr("{}.frameStart".format(s), - folder_attributes["frameStart"]) - if "frameEnd" in attr: - cmds.setAttr("{}.frameEnd".format(s), - folder_attributes["frameEnd"],) - except ValueError: - pass + + host = registered_host() + create_context = CreateContext(host) + folder_entity = get_current_task_entity(fields={"attrib"}) + + instance_values = { + "folderPath": create_context.get_current_folder_path(), + "task": create_context.get_current_task_name(), + } + creator_attribute_values = { + "frameStart": folder_entity["attrib"]["frameStart"], + "frameEnd": folder_entity["attrib"]["frameEnd"], + } + + has_changes = False + for instance in create_context.instances: + for key, value in instance_values.items(): + if key not in instance or instance[key] == value: + continue + + # Update instance value + print(f"Updating {instance.product_name} {key} to: {value}") + instance[key] = value + has_changes = True + + creator_attributes = instance.creator_attributes + for key, value in creator_attribute_values.items(): + if ( + key not in creator_attributes + or creator_attributes[key] == value + ): + continue + + # Update instance creator attribute value + print(f"Updating {instance.product_name} {key} to: {value}") + instance[key] = value + has_changes = True + + if has_changes: + create_context.save_changes() def show_message(title, msg): @@ -3926,17 +4136,26 @@ def len_flattened(components): return n -def get_all_children(nodes): +def get_all_children(nodes, ignore_intermediate_objects=False): """Return all children of `nodes` including each instanced child. Using maya.cmds.listRelatives(allDescendents=True) includes only the first instance. As such, this function acts as an optimal replacement with a focus on a fast query. + Args: + nodes (iterable): List of nodes to get children for. + ignore_intermediate_objects (bool): Ignore any children that + are intermediate objects. + + Returns: + set: Children of input nodes. 
+ """ sel = OpenMaya.MSelectionList() traversed = set() iterator = OpenMaya.MItDag(OpenMaya.MItDag.kDepthFirst) + fn_dag = OpenMaya.MFnDagNode() for node in nodes: if node in traversed: @@ -3953,6 +4172,13 @@ def get_all_children(nodes): iterator.next() # noqa: B305 while not iterator.isDone(): + if ignore_intermediate_objects: + fn_dag.setObject(iterator.currentItem()) + if fn_dag.isIntermediateObject: + iterator.prune() + iterator.next() # noqa: B305 + continue + path = iterator.fullPathName() if path in traversed: @@ -3963,7 +4189,7 @@ def get_all_children(nodes): traversed.add(path) iterator.next() # noqa: B305 - return list(traversed) + return traversed def get_capture_preset( @@ -4044,6 +4270,9 @@ def get_reference_node(members, log=None): if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"): continue + if not is_valid_reference_node(ref): + continue + references.add(ref) assert references, "No reference node found in container" @@ -4074,15 +4303,19 @@ def get_reference_node_parents(ref): list: The upstream parent reference nodes. """ - parent = cmds.referenceQuery(ref, - referenceNode=True, - parent=True) + def _get_parent(reference_node): + """Return parent reference node, but ignore invalid reference nodes""" + if not is_valid_reference_node(reference_node): + return + return cmds.referenceQuery(reference_node, + referenceNode=True, + parent=True) + + parent = _get_parent(ref) parents = [] while parent: parents.append(parent) - parent = cmds.referenceQuery(parent, - referenceNode=True, - parent=True) + parent = _get_parent(parent) return parents diff --git a/client/ayon_core/hosts/maya/api/lib_renderproducts.py b/client/ayon_core/hosts/maya/api/lib_renderproducts.py index 7f26145e1d..832d1c21c2 100644 --- a/client/ayon_core/hosts/maya/api/lib_renderproducts.py +++ b/client/ayon_core/hosts/maya/api/lib_renderproducts.py @@ -297,7 +297,7 @@ class ARenderProducts: """ return self._get_attr("defaultRenderGlobals", attribute) - def _get_attr(self, node_attr, attribute=None): + def _get_attr(self, node_attr, attribute=None, as_string=True): """Return the value of the attribute in the renderlayer For readability this allows passing in the attribute in two ways. 
@@ -317,7 +317,7 @@ class ARenderProducts: else: plug = "{}.{}".format(node_attr, attribute) - return lib.get_attr_in_layer(plug, layer=self.layer) + return lib.get_attr_in_layer(plug, layer=self.layer, as_string=as_string) @staticmethod def extract_separator(file_prefix): @@ -1133,9 +1133,24 @@ class RenderProductsRedshift(ARenderProducts): aovs = list(set(aovs) - set(ref_aovs)) products = [] + global_aov_enabled = bool( + self._get_attr("redshiftOptions.aovGlobalEnableMode", as_string=False) + ) + colorspace = lib.get_color_management_output_transform() + if not global_aov_enabled: + # only beauty output + for camera in cameras: + products.insert(0, + RenderProduct(productName="", + ext=ext, + multipart=self.multipart, + camera=camera, + colorspace=colorspace)) + return products + light_groups_enabled = False has_beauty_aov = False - colorspace = lib.get_color_management_output_transform() + for aov in aovs: enabled = self._get_attr(aov, "enabled") if not enabled: diff --git a/client/ayon_core/hosts/maya/api/lib_rendersettings.py b/client/ayon_core/hosts/maya/api/lib_rendersettings.py index 905e8c69af..f9e243146a 100644 --- a/client/ayon_core/hosts/maya/api/lib_rendersettings.py +++ b/client/ayon_core/hosts/maya/api/lib_rendersettings.py @@ -7,7 +7,7 @@ from ayon_core.lib import Logger from ayon_core.settings import get_project_settings from ayon_core.pipeline import CreatorError, get_current_project_name -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.hosts.maya.api.lib import reset_frame_range @@ -77,7 +77,7 @@ class RenderSettings(object): renderer = cmds.getAttr( 'defaultRenderGlobals.currentRenderer').lower() - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() folder_attributes = folder_entity["attrib"] # project_settings/maya/create/CreateRender/aov_separator try: diff --git a/client/ayon_core/hosts/maya/api/lib_rendersetup.py b/client/ayon_core/hosts/maya/api/lib_rendersetup.py index c2b5ec843c..6dca8eb6dd 100644 --- a/client/ayon_core/hosts/maya/api/lib_rendersetup.py +++ b/client/ayon_core/hosts/maya/api/lib_rendersetup.py @@ -77,7 +77,7 @@ def get_rendersetup_layer(layer): if conn.endswith(".legacyRenderLayer")), None) -def get_attr_in_layer(node_attr, layer): +def get_attr_in_layer(node_attr, layer, as_string=True): """Return attribute value in Render Setup layer. This will only work for attributes which can be @@ -124,7 +124,7 @@ def get_attr_in_layer(node_attr, layer): node = history_overrides[-1] if history_overrides else override node_attr_ = node + ".original" - return get_attribute(node_attr_, asString=True) + return get_attribute(node_attr_, asString=as_string) layer = get_rendersetup_layer(layer) rs = renderSetup.instance() @@ -144,7 +144,7 @@ def get_attr_in_layer(node_attr, layer): # we will let it error out. 
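# Illustrative usage sketch (not part of the patch): passing `as_string=False`
# is what lets the Redshift product collection above read
# `redshiftOptions.aovGlobalEnableMode` as its numeric enum index (usable with
# `bool()`) instead of the enum label. The layer name below is hypothetical.
from ayon_core.hosts.maya.api.lib import get_attr_in_layer

layer = "rs_main"  # hypothetical render setup layer name
label = get_attr_in_layer("redshiftOptions.aovGlobalEnableMode", layer=layer)
index = get_attr_in_layer("redshiftOptions.aovGlobalEnableMode", layer=layer,
                          as_string=False)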
rs.switchToLayer(current_layer) - return get_attribute(node_attr, asString=True) + return get_attribute(node_attr, asString=as_string) overrides = get_attr_overrides(node_attr, layer) default_layer_value = get_default_layer_value(node_attr) diff --git a/client/ayon_core/hosts/maya/api/menu.py b/client/ayon_core/hosts/maya/api/menu.py index 0cb7edd40d..e3ef50cdc0 100644 --- a/client/ayon_core/hosts/maya/api/menu.py +++ b/client/ayon_core/hosts/maya/api/menu.py @@ -1,4 +1,5 @@ import os +import json import logging from functools import partial @@ -214,8 +215,18 @@ def install(project_settings): ) return - config = project_settings["maya"]["scriptsmenu"]["definition"] - _menu = project_settings["maya"]["scriptsmenu"]["name"] + menu_settings = project_settings["maya"]["scriptsmenu"] + menu_name = menu_settings["name"] + config = menu_settings["definition"] + + if menu_settings.get("definition_type") == "definition_json": + data = menu_settings["definition_json"] + try: + config = json.loads(data) + except json.JSONDecodeError as exc: + print("Skipping studio menu, error decoding JSON definition.") + log.error(exc) + return if not config: log.warning("Skipping studio menu, no definition found.") @@ -223,8 +234,8 @@ def install(project_settings): # run the launcher for Maya menu studio_menu = launchformaya.main( - title=_menu.title(), - objectName=_menu.title().lower().replace(" ", "_") + title=menu_name.title(), + objectName=menu_name.title().lower().replace(" ", "_") ) # apply configuration diff --git a/client/ayon_core/hosts/maya/api/pipeline.py b/client/ayon_core/hosts/maya/api/pipeline.py index b3e401b91e..864a0c1599 100644 --- a/client/ayon_core/hosts/maya/api/pipeline.py +++ b/client/ayon_core/hosts/maya/api/pipeline.py @@ -67,6 +67,9 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") AVALON_CONTAINERS = ":AVALON_CONTAINERS" +# Track whether the workfile tool is about to save +_about_to_save = False + class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): name = "maya" @@ -577,10 +580,15 @@ def on_save(): _remove_workfile_lock() # Generate ids of the current context on nodes in the scene - nodes = lib.get_id_required_nodes(referenced_nodes=False) + nodes = lib.get_id_required_nodes(referenced_nodes=False, + existing_ids=False) for node, new_id in lib.generate_ids(nodes): lib.set_id(node, new_id, overwrite=False) + # We are now starting the actual save directly + global _about_to_save + _about_to_save = False + def on_open(): """On scene open let's assume the containers have changed.""" @@ -646,9 +654,10 @@ def on_task_changed(): "Can't set project for new context because path does not exist: {}" ).format(workdir)) - with lib.suspended_refresh(): - lib.set_context_settings() - lib.update_content_on_context_change() + global _about_to_save + if not lib.IS_HEADLESS and _about_to_save: + # Let's prompt the user to update the context settings or not + lib.prompt_reset_context() def before_workfile_open(): @@ -664,6 +673,9 @@ def before_workfile_save(event): if workdir_path: create_workspace_mel(workdir_path, project_name) + global _about_to_save + _about_to_save = True + def workfile_save_before_xgen(event): """Manage Xgen external files when switching context. 
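# Illustrative sketch (not part of the patch): the module-level
# `_about_to_save` flag is a hand-off between the workfile callbacks. The
# stand-in functions below only mirror the ordering of the real handlers
# (`before_workfile_save`, `on_task_changed`, `on_save`) shown above, so a
# task change without a save never triggers the prompt.
_about_to_save = False

def before_workfile_save(event):
    # The workfile tool is about to save, possibly into another task
    global _about_to_save
    _about_to_save = True

def on_task_changed():
    # Only prompt when the task change happens as part of a "save as"
    if _about_to_save:
        print("prompt_reset_context() would run here")

def on_save():
    # The save itself clears the flag again
    global _about_to_save
    _about_to_save = False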
diff --git a/client/ayon_core/hosts/maya/api/workfile_template_builder.py b/client/ayon_core/hosts/maya/api/workfile_template_builder.py index cb25a722f0..75386d7e64 100644 --- a/client/ayon_core/hosts/maya/api/workfile_template_builder.py +++ b/client/ayon_core/hosts/maya/api/workfile_template_builder.py @@ -286,7 +286,7 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): if not container: return - roots = cmds.sets(container, q=True) + roots = cmds.sets(container, q=True) or [] ref_node = None try: ref_node = get_reference_node(roots) diff --git a/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py b/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py index ed294da125..45785ac354 100644 --- a/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py +++ b/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class MayaPreAutoLoadPlugins(PreLaunchHook): diff --git a/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py b/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py index 3fd81ceff4..683b4c59c7 100644 --- a/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py +++ b/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.maya.lib import create_workspace_mel diff --git a/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py b/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py index 6bf678474f..a54f17c6c6 100644 --- a/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py +++ b/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class MayaPreOpenWorkfilePostInitialization(PreLaunchHook): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_render.py b/client/ayon_core/hosts/maya/plugins/create/create_render.py index 213d5b543e..e5a8d4dbd8 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_render.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_render.py @@ -40,8 +40,15 @@ class CreateRenderlayer(plugin.RenderlayerCreator): def create(self, product_name, instance_data, pre_create_data): # Only allow a single render instance to exist if self._get_singleton_node(): - raise CreatorError("A Render instance already exists - only " - "one can be configured.") + raise CreatorError( + "A Render instance already exists - only one can be " + "configured.\n\n" + "To render multiple render layers, create extra Render Setup " + "Layers via Maya's Render Setup UI.\n" + "Then refresh the publisher to detect the new layers for " + "rendering.\n\n" + "With a render instance present all Render Setup layers in " + "your workfile are renderable instances.") # Apply default project render settings on create if self.render_settings.get("apply_render_settings"): diff --git a/client/ayon_core/hosts/maya/plugins/load/_load_animation.py b/client/ayon_core/hosts/maya/plugins/load/_load_animation.py index 884bdd7538..393f6b0115 100644 --- a/client/ayon_core/hosts/maya/plugins/load/_load_animation.py +++ b/client/ayon_core/hosts/maya/plugins/load/_load_animation.py @@ -51,7 +51,7 @@ class 
AbcLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): "camera", "pointcache", } - representations = ["abc"] + representations = {"abc"} label = "Reference animation" order = -10 @@ -81,7 +81,7 @@ class FbxLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): "animation", "camera", } - representations = ["fbx"] + representations = {"fbx"} label = "Reference animation" order = -10 diff --git a/client/ayon_core/hosts/maya/plugins/load/actions.py b/client/ayon_core/hosts/maya/plugins/load/actions.py index a98fe97692..8bef219812 100644 --- a/client/ayon_core/hosts/maya/plugins/load/actions.py +++ b/client/ayon_core/hosts/maya/plugins/load/actions.py @@ -19,7 +19,7 @@ class SetFrameRangeLoader(load.LoaderPlugin): "proxyAbc", "pointcache", } - representations = ["abc"] + representations = {"abc"} label = "Set frame range" order = 11 @@ -54,7 +54,7 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): "proxyAbc", "pointcache", } - representations = ["abc"] + representations = {"abc"} label = "Set frame range (with handles)" order = 12 @@ -94,7 +94,7 @@ class ImportMayaLoader(ayon_core.hosts.maya.api.plugin.Loader): so you could also use it as a new base. """ - representations = ["ma", "mb", "obj"] + representations = {"ma", "mb", "obj"} product_types = { "model", "pointcache", @@ -125,6 +125,11 @@ class ImportMayaLoader(ayon_core.hosts.maya.api.plugin.Loader): ) ] + @classmethod + def apply_settings(cls, project_settings): + super(ImportMayaLoader, cls).apply_settings(project_settings) + cls.enabled = cls.load_settings["import_loader"].get("enabled", True) + def load(self, context, name=None, namespace=None, data=None): import maya.cmds as cmds diff --git a/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py b/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py index 7170c30422..4b7d2f42ab 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py @@ -31,7 +31,7 @@ class ArnoldStandinLoader(load.LoaderPlugin): product_types = { "ass", "animation", "model", "proxyAbc", "pointcache", "usd" } - representations = ["ass", "abc", "usda", "usdc", "usd"] + representations = {"ass", "abc", "usda", "usdc", "usd"} label = "Load as Arnold standin" order = -5 diff --git a/client/ayon_core/hosts/maya/plugins/load/load_assembly.py b/client/ayon_core/hosts/maya/plugins/load/load_assembly.py index a0cbf91905..0fcbc6bd07 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_assembly.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_assembly.py @@ -13,7 +13,7 @@ from ayon_core.hosts.maya.api import setdress class AssemblyLoader(load.LoaderPlugin): product_types = {"assembly"} - representations = ["json"] + representations = {"json"} label = "Load Set Dress" order = -9 diff --git a/client/ayon_core/hosts/maya/plugins/load/load_audio.py b/client/ayon_core/hosts/maya/plugins/load/load_audio.py index 0a40993fcd..228189f1a1 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_audio.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_audio.py @@ -13,7 +13,7 @@ class AudioLoader(load.LoaderPlugin): product_types = {"audio"} label = "Load audio" - representations = ["wav"] + representations = {"wav"} icon = "volume-up" color = "orange" diff --git a/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py b/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py index 9689282ae9..9832d2d657 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py +++ 
b/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py @@ -14,7 +14,7 @@ class GpuCacheLoader(load.LoaderPlugin): """Load Alembic as gpuCache""" product_types = {"model", "animation", "proxyAbc", "pointcache"} - representations = ["abc", "gpu_cache"] + representations = {"abc", "gpu_cache"} label = "Load Gpu Cache" order = -5 diff --git a/client/ayon_core/hosts/maya/plugins/load/load_image.py b/client/ayon_core/hosts/maya/plugins/load/load_image.py index 3641655d49..5b0858ce70 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_image.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_image.py @@ -93,7 +93,7 @@ class FileNodeLoader(load.LoaderPlugin): product_types = {"image", "plate", "render"} label = "Load file node" - representations = ["exr", "tif", "png", "jpg"] + representations = {"exr", "tif", "png", "jpg"} icon = "image" color = "orange" order = 2 diff --git a/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py b/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py index 7d6f7e26cf..15c7654c52 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py @@ -89,7 +89,7 @@ class ImagePlaneLoader(load.LoaderPlugin): product_types = {"image", "plate", "render"} label = "Load imagePlane" - representations = ["mov", "exr", "preview", "png", "jpg"] + representations = {"mov", "exr", "preview", "png", "jpg"} icon = "image" color = "orange" @@ -142,9 +142,21 @@ class ImagePlaneLoader(load.LoaderPlugin): with namespaced(namespace): # Create inside the namespace image_plane_transform, image_plane_shape = cmds.imagePlane( - fileName=context["representation"]["data"]["path"], + fileName=self.filepath_from_context(context), camera=camera ) + + # Set colorspace + colorspace = self.get_colorspace(context["representation"]) + if colorspace: + cmds.setAttr( + "{}.ignoreColorSpaceFileRules".format(image_plane_shape), + True + ) + cmds.setAttr("{}.colorSpace".format(image_plane_shape), + colorspace, type="string") + + # Set offset frame range start_frame = cmds.playbackOptions(query=True, min=True) end_frame = cmds.playbackOptions(query=True, max=True) @@ -159,7 +171,7 @@ class ImagePlaneLoader(load.LoaderPlugin): plug = "{}.{}".format(image_plane_shape, attr) cmds.setAttr(plug, value) - movie_representations = ["mov", "preview"] + movie_representations = {"mov", "preview"} if context["representation"]["name"] in movie_representations: cmds.setAttr(image_plane_shape + ".type", 2) @@ -216,6 +228,15 @@ class ImagePlaneLoader(load.LoaderPlugin): repre_entity["id"], type="string") + colorspace = self.get_colorspace(repre_entity) + if colorspace: + cmds.setAttr( + "{}.ignoreColorSpaceFileRules".format(image_plane_shape), + True + ) + cmds.setAttr("{}.colorSpace".format(image_plane_shape), + colorspace, type="string") + # Set frame range. 
start_frame = folder_entity["attrib"]["frameStart"] end_frame = folder_entity["attrib"]["frameEnd"] @@ -243,3 +264,12 @@ class ImagePlaneLoader(load.LoaderPlugin): deleteNamespaceContent=True) except RuntimeError: pass + + def get_colorspace(self, representation): + + data = representation.get("data", {}).get("colorspaceData", {}) + if not data: + return + + colorspace = data.get("colorspace") + return colorspace diff --git a/client/ayon_core/hosts/maya/plugins/load/load_look.py b/client/ayon_core/hosts/maya/plugins/load/load_look.py index f126a1df26..af0e000dd2 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_look.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_look.py @@ -18,7 +18,7 @@ class LookLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): """Specific loader for lookdev""" product_types = {"look"} - representations = ["ma"] + representations = {"ma"} label = "Reference look" order = -10 diff --git a/client/ayon_core/hosts/maya/plugins/load/load_matchmove.py b/client/ayon_core/hosts/maya/plugins/load/load_matchmove.py index 05da173bb7..b19b14b1aa 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_matchmove.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_matchmove.py @@ -9,7 +9,7 @@ class MatchmoveLoader(load.LoaderPlugin): """ product_types = {"matchmove"} - representations = ["py", "mel"] + representations = {"py", "mel"} defaults = ["Camera", "Object", "Mocap"] label = "Run matchmove script" diff --git a/client/ayon_core/hosts/maya/plugins/load/load_maya_usd.py b/client/ayon_core/hosts/maya/plugins/load/load_maya_usd.py index cd73c26de1..628a25e574 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_maya_usd.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_maya_usd.py @@ -17,7 +17,7 @@ class MayaUsdLoader(load.LoaderPlugin): """Read USD data in a Maya USD Proxy""" product_types = {"model", "usd", "pointcache", "animation"} - representations = ["usd", "usda", "usdc", "usdz", "abc"] + representations = {"usd", "usda", "usdc", "usdz", "abc"} label = "Load USD to Maya Proxy" order = -1 diff --git a/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd.py b/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd.py index 984d14dff3..f32c76481d 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd.py @@ -28,7 +28,7 @@ class MultiverseUsdLoader(load.LoaderPlugin): "pointcache", "animation", } - representations = ["usd", "usda", "usdc", "usdz", "abc"] + representations = {"usd", "usda", "usdc", "usdz", "abc"} label = "Load USD to Multiverse" order = -10 diff --git a/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd_over.py b/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd_over.py index dc5bc6ec1c..b23fa48f07 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd_over.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd_over.py @@ -20,7 +20,7 @@ class MultiverseUsdOverLoader(load.LoaderPlugin): """Reference file""" product_types = {"mvUsdOverride"} - representations = ["usda", "usd", "udsz"] + representations = {"usda", "usd", "udsz"} label = "Load Usd Override into Compound" order = -10 diff --git a/client/ayon_core/hosts/maya/plugins/load/load_redshift_proxy.py b/client/ayon_core/hosts/maya/plugins/load/load_redshift_proxy.py index 0f91d9048a..7760d4081c 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_redshift_proxy.py +++ 
b/client/ayon_core/hosts/maya/plugins/load/load_redshift_proxy.py @@ -23,7 +23,7 @@ class RedshiftProxyLoader(load.LoaderPlugin): """Load Redshift proxy""" product_types = {"redshiftproxy"} - representations = ["rs"] + representations = {"rs"} label = "Import Redshift Proxy" order = -10 diff --git a/client/ayon_core/hosts/maya/plugins/load/load_reference.py b/client/ayon_core/hosts/maya/plugins/load/load_reference.py index de18b2b0ec..847591bd11 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_reference.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_reference.py @@ -107,7 +107,7 @@ class ReferenceLoader(plugin.ReferenceLoader): "matchmove", } - representations = ["ma", "abc", "fbx", "mb"] + representations = {"ma", "abc", "fbx", "mb"} label = "Reference" order = -10 @@ -269,7 +269,7 @@ class MayaUSDReferenceLoader(ReferenceLoader): label = "Reference Maya USD" product_types = {"usd"} - representations = ["usd"] + representations = {"usd"} extensions = {"usd", "usda", "usdc"} options = ReferenceLoader.options + [ diff --git a/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py b/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py index 6f20e677f0..d5685b2c4c 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py @@ -9,7 +9,9 @@ instance. import json import sys import six +import contextlib +from ayon_core.lib import BoolDef, EnumDef from ayon_core.pipeline import ( load, get_representation_path @@ -21,59 +23,115 @@ from maya import cmds import maya.app.renderSetup.model.renderSetup as renderSetup +@contextlib.contextmanager +def mark_all_imported(enabled): + """Mark all imported nodes accepted by removing the `imported` attribute""" + if not enabled: + yield + return + + node_types = cmds.pluginInfo("renderSetup", query=True, dependNode=True) + + # Get node before load, then we can disable `imported` + # attribute on all new render setup layers after import + before = cmds.ls(type=node_types, long=True) + try: + yield + finally: + after = cmds.ls(type=node_types, long=True) + for node in (node for node in after if node not in before): + if cmds.attributeQuery("imported", + node=node, + exists=True): + plug = "{}.imported".format(node) + if cmds.getAttr(plug): + cmds.deleteAttr(plug) + + class RenderSetupLoader(load.LoaderPlugin): """Load json preset for RenderSetup overwriting current one.""" product_types = {"rendersetup"} - representations = ["json"] + representations = {"json"} defaults = ['Main'] label = "Load RenderSetup template" icon = "tablet" color = "orange" + options = [ + BoolDef("accept_import", + label="Accept import on load", + tooltip=( + "By default importing or pasting Render Setup collections " + "will display them italic in the Render Setup list.\nWith " + "this enabled the load will directly mark the import " + "'accepted' and remove the italic view." + ), + default=True), + BoolDef("load_managed", + label="Load Managed", + tooltip=( + "Containerize the rendersetup on load so it can be " + "'updated' later." 
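# Illustrative usage sketch (not part of the patch): `mark_all_imported`
# (defined above in this module) wraps the Render Setup decode so freshly
# imported layers do not stay flagged as "imported" (shown italic in the UI),
# which is what the loader's `load` method below does.
import json
import maya.app.renderSetup.model.renderSetup as renderSetup

def load_rendersetup_preset(path, accept_import=True):
    with mark_all_imported(accept_import):
        with open(path, "r") as f:
            renderSetup.instance().decode(
                json.load(f), renderSetup.DECODE_AND_OVERWRITE, None)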
+ ), + default=True), + EnumDef("import_mode", + label="Import mode", + items={ + renderSetup.DECODE_AND_OVERWRITE: ( + "Flush existing render setup and " + "add without any namespace" + ), + renderSetup.DECODE_AND_MERGE: ( + "Merge with the existing render setup objects and " + "rename the unexpected objects" + ), + renderSetup.DECODE_AND_RENAME: ( + "Renaming all decoded render setup objects to not " + "conflict with the existing render setup" + ), + }, + default=renderSetup.DECODE_AND_OVERWRITE) + ] + def load(self, context, name, namespace, data): """Load RenderSetup settings.""" - # from ayon_core.hosts.maya.api.lib import namespaced - - folder_name = context["folder"]["name"] - namespace = namespace or lib.unique_namespace( - folder_name + "_", - prefix="_" if folder_name[0].isdigit() else "", - suffix="_", - ) path = self.filepath_from_context(context) + + accept_import = data.get("accept_import", True) + import_mode = data.get("import_mode", renderSetup.DECODE_AND_OVERWRITE) + self.log.info(">>> loading json [ {} ]".format(path)) - with open(path, "r") as file: - renderSetup.instance().decode( - json.load(file), renderSetup.DECODE_AND_OVERWRITE, None) + with mark_all_imported(accept_import): + with open(path, "r") as file: + renderSetup.instance().decode( + json.load(file), import_mode, None) - nodes = [] - null = cmds.sets(name="null_SET", empty=True) - nodes.append(null) + if data.get("load_managed", True): + self.log.info(">>> containerising [ {} ]".format(name)) + folder_name = context["folder"]["name"] + namespace = namespace or lib.unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) - self[:] = nodes - if not nodes: - return - - self.log.info(">>> containerising [ {} ]".format(name)) - return containerise( - name=name, - namespace=namespace, - nodes=nodes, - context=context, - loader=self.__class__.__name__) + return containerise( + name=name, + namespace=namespace, + nodes=[], + context=context, + loader=self.__class__.__name__) def remove(self, container): """Remove RenderSetup settings instance.""" - from maya import cmds - container_name = container["objectName"] self.log.info("Removing '%s' from Maya.." 
% container["name"]) - container_content = cmds.sets(container_name, query=True) + container_content = cmds.sets(container_name, query=True) or [] nodes = cmds.ls(container_content, long=True) nodes.append(container_name) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py index 0780a7b3e7..5b0c78fd6f 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py @@ -13,7 +13,7 @@ class LoadVDBtoArnold(load.LoaderPlugin): """Load OpenVDB for Arnold in aiVolume""" product_types = {"vdbcache"} - representations = ["vdb"] + representations = {"vdb"} label = "Load VDB to Arnold" icon = "cloud" diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_redshift.py b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_redshift.py index 3fa490f405..e345a7bf6f 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_redshift.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_redshift.py @@ -19,7 +19,7 @@ class LoadVDBtoRedShift(load.LoaderPlugin): """ product_types = {"vdbcache"} - representations = ["vdb"] + representations = {"vdb"} label = "Load VDB to RedShift" icon = "cloud" diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_vray.py b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_vray.py index 7b87c21f38..d6d1c948b0 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_vray.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_vray.py @@ -78,7 +78,7 @@ class LoadVDBtoVRay(load.LoaderPlugin): """Load OpenVDB in a V-Ray Volume Grid""" product_types = {"vdbcache"} - representations = ["vdb"] + representations = {"vdb"} label = "Load VDB to VRay" icon = "cloud" diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vrayproxy.py b/client/ayon_core/hosts/maya/plugins/load/load_vrayproxy.py index 895a4a4127..14d645021c 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vrayproxy.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vrayproxy.py @@ -28,7 +28,7 @@ class VRayProxyLoader(load.LoaderPlugin): """Load VRay Proxy with Alembic or VrayMesh.""" product_types = {"vrayproxy", "model", "pointcache", "animation"} - representations = ["vrmesh", "abc"] + representations = {"vrmesh", "abc"} label = "Import VRay Proxy" order = -10 diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vrayscene.py b/client/ayon_core/hosts/maya/plugins/load/load_vrayscene.py index 36a25e2af1..ea3215da97 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vrayscene.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vrayscene.py @@ -18,7 +18,7 @@ class VRaySceneLoader(load.LoaderPlugin): """Load Vray scene""" product_types = {"vrayscene_layer"} - representations = ["vrscene"] + representations = {"vrscene"} label = "Import VRay Scene" order = -10 diff --git a/client/ayon_core/hosts/maya/plugins/load/load_xgen.py b/client/ayon_core/hosts/maya/plugins/load/load_xgen.py index 880efd82e1..e2664439b0 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_xgen.py @@ -21,7 +21,7 @@ class XgenLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): """Load Xgen as reference""" product_types = {"xgen"} - representations = ["ma", "mb"] + representations = {"ma", "mb"} label = "Reference Xgen" icon = "code-fork" diff --git a/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py 
b/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py index a5cd04b0f4..caea6b7a72 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py @@ -37,7 +37,7 @@ class YetiCacheLoader(load.LoaderPlugin): """Load Yeti Cache with one or more Yeti nodes""" product_types = {"yeticache", "yetiRig"} - representations = ["fur"] + representations = {"fur"} label = "Load Yeti Cache" order = -9 diff --git a/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py b/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py index 74e33c5866..bf9525bae3 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py @@ -8,7 +8,7 @@ class YetiRigLoader(plugin.ReferenceLoader): """This loader will load Yeti rig.""" product_types = {"yetiRig"} - representations = ["ma"] + representations = {"ma"} label = "Load Yeti Rig" order = -9 diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py b/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py index 2d621353e6..0db89bee31 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py @@ -46,11 +46,18 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin): self.log.debug("data: {}".format(instance.data)) def get_hierarchy(self, nodes): - """Return nodes with all their children""" + """Return nodes with all their children. + + Arguments: + nodes (List[str]): List of nodes to collect children hierarchy for + + Returns: + list: Input nodes with their children hierarchy + + """ nodes = cmds.ls(nodes, long=True) if not nodes: return [] - children = get_all_children(nodes) - # Make sure nodes merged with children only - # contains unique entries - return list(set(nodes + children)) + + children = get_all_children(nodes, ignore_intermediate_objects=True) + return list(children.union(nodes)) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py b/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py index 94fcc834e1..93b46c511b 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py @@ -1,5 +1,3 @@ -import json - from maya import cmds import pyblish.api @@ -11,18 +9,24 @@ class CollectFileDependencies(pyblish.api.ContextPlugin): label = "Collect File Dependencies" order = pyblish.api.CollectorOrder - 0.49 hosts = ["maya"] + families = ["renderlayer"] + + @classmethod + def apply_settings(cls, project_settings, system_settings): + # Disable plug-in if not used for deadline submission anyway + settings = project_settings["deadline"]["publish"]["MayaSubmitDeadline"] # noqa + cls.enabled = settings.get("asset_dependencies", True) def process(self, context): - dependencies = [] + dependencies = set() for node in cmds.ls(type="file"): path = cmds.getAttr("{}.{}".format(node, "fileTextureName")) if path not in dependencies: - dependencies.append(path) + dependencies.add(path) for node in cmds.ls(type="AlembicNode"): path = cmds.getAttr("{}.{}".format(node, "abc_File")) if path not in dependencies: - dependencies.append(path) + dependencies.add(path) - context.data["fileDependencies"] = dependencies - self.log.debug(json.dumps(dependencies, indent=4)) + context.data["fileDependencies"] = 
list(dependencies) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py b/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py index 85be15bb7b..774c217cfd 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py @@ -48,15 +48,15 @@ class CollectNewInstances(pyblish.api.InstancePlugin): # Collect members members = cmds.ls(members, long=True) or [] + # Collect full hierarchy dag_members = cmds.ls(members, type="dagNode", long=True) - children = get_all_children(dag_members) - children = cmds.ls(children, noIntermediate=True, long=True) - parents = ( - self.get_all_parents(members) - if creator_attributes.get("includeParentHierarchy", True) - else [] - ) - members_hierarchy = list(set(members + children + parents)) + children = get_all_children(dag_members, + ignore_intermediate_objects=True) + + members_hierarchy = set(members) + members_hierarchy.update(children) + if creator_attributes.get("includeParentHierarchy", True): + members_hierarchy.update(self.get_all_parents(dag_members)) instance[:] = members_hierarchy @@ -97,16 +97,16 @@ class CollectNewInstances(pyblish.api.InstancePlugin): """Get all parents by using string operations (optimization) Args: - nodes (list): the nodes which are found in the objectSet + nodes (iterable): the nodes which are found in the objectSet Returns: - list + set """ - parents = [] + parents = set() for node in nodes: splitted = node.split("|") items = ["|".join(splitted[0:i]) for i in range(2, len(splitted))] - parents.extend(items) + parents.update(items) - return list(set(parents)) + return parents diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_look.py b/client/ayon_core/hosts/maya/plugins/publish/collect_look.py index 00e1855b19..a3a32bc0cb 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_look.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_look.py @@ -8,7 +8,7 @@ from maya import cmds # noqa import pyblish.api from ayon_core.hosts.maya.api import lib -SHAPE_ATTRS = ["castsShadows", +SHAPE_ATTRS = {"castsShadows", "receiveShadows", "motionBlur", "primaryVisibility", @@ -16,8 +16,7 @@ SHAPE_ATTRS = ["castsShadows", "visibleInReflections", "visibleInRefractions", "doubleSided", - "opposite"] -SHAPE_ATTRS = set(SHAPE_ATTRS) + "opposite"} def get_pxr_multitexture_file_attrs(node): @@ -59,9 +58,8 @@ for node_type in list(FILE_NODES.keys()): if node_type not in all_node_types: FILE_NODES.pop(node_type) -for node_type in RENDER_SET_TYPES: - if node_type not in all_node_types: - RENDER_SET_TYPES.remove(node_type) +RENDER_SET_TYPES = [node_type for node_type in RENDER_SET_TYPES + if node_type in all_node_types] del all_node_types # Cache pixar dependency node types so we can perform a type lookup against it @@ -109,8 +107,7 @@ def get_look_attrs(node): if cmds.objectType(node, isAType="shape"): attrs = cmds.listAttr(node, changedSinceFileOpen=True) or [] for attr in attrs: - if attr in SHAPE_ATTRS or \ - attr not in SHAPE_ATTRS and attr.startswith('ai'): + if attr in SHAPE_ATTRS or attr.startswith('ai'): result.append(attr) return result @@ -290,7 +287,6 @@ class CollectLook(pyblish.api.InstancePlugin): families = ["look"] label = "Collect Look" hosts = ["maya"] - maketx = True def process(self, instance): """Collect the Look in the instance with the correct layer settings""" @@ -302,15 +298,12 @@ class CollectLook(pyblish.api.InstancePlugin): """Collect looks. 
Args: - instance: Instance to collect. + instance (pyblish.api.Instance): Instance to collect. """ self.log.debug("Looking for look associations " "for %s" % instance.data['name']) - # Lookup set (optimization) - instance_lookup = set(cmds.ls(instance, long=True)) - # Discover related object sets self.log.debug("Gathering sets ...") sets = self.collect_sets(instance) @@ -351,75 +344,15 @@ class CollectLook(pyblish.api.InstancePlugin): # Collect file nodes used by shading engines (if we have any) files = [] look_sets = list(sets.keys()) - shader_attrs = [ - "surfaceShader", - "volumeShader", - "displacementShader", - "aiSurfaceShader", - "aiVolumeShader", - "rman__surface", - "rman__displacement" - ] if look_sets: self.log.debug("Found look sets: {}".format(look_sets)) - - # Get all material attrs for all look sets to retrieve their inputs - existing_attrs = [] - for look in look_sets: - for attr in shader_attrs: - if cmds.attributeQuery(attr, node=look, exists=True): - existing_attrs.append("{}.{}".format(look, attr)) - - materials = cmds.listConnections(existing_attrs, - source=True, - destination=False) or [] - - self.log.debug("Found materials:\n{}".format(materials)) - - self.log.debug("Found the following sets:\n{}".format(look_sets)) - # Get the entire node chain of the look sets - # history = cmds.listHistory(look_sets, allConnections=True) - # if materials list is empty, listHistory() will crash with - # RuntimeError - history = set() - if materials: - history = set( - cmds.listHistory(materials, allConnections=True)) - - # Since we retrieved history only of the connected materials - # connected to the look sets above we now add direct history - # for some of the look sets directly - # handling render attribute sets - - # Maya (at least 2024) crashes with Warning when render set type - # isn't available. cmds.ls() will return empty list - if RENDER_SET_TYPES: - render_sets = cmds.ls(look_sets, type=RENDER_SET_TYPES) - if render_sets: - history.update( - cmds.listHistory(render_sets, - future=False, - pruneDagObjects=True) - or [] - ) - - # Ensure unique entries only - history = list(history) - - files = cmds.ls(history, - # It's important only node types are passed that - # exist (e.g. for loaded plugins) because otherwise - # the result will turn back empty - type=list(FILE_NODES.keys()), - long=True) - - # Sort for log readability - files.sort() + files = self.collect_file_nodes(look_sets) self.log.debug("Collected file nodes:\n{}".format(files)) - # Collect textures if any file nodes are found + + # Collect texture resources if any file nodes are found resources = [] - for node in files: # sort for log readability + for node in files: resources.extend(self.collect_resources(node)) instance.data["resources"] = resources self.log.debug("Collected resources: {}".format(resources)) @@ -439,6 +372,78 @@ class CollectLook(pyblish.api.InstancePlugin): self.log.debug("Collected look for %s" % instance) + def collect_file_nodes(self, look_sets): + """Get the entire node chain of the look sets and return file nodes + + Arguments: + look_sets (List[str]): List of sets and shading engines relevant + to the look. + + Returns: + List[str]: List of file node names. 
+ + """ + + shader_attrs = [ + "surfaceShader", + "volumeShader", + "displacementShader", + "aiSurfaceShader", + "aiVolumeShader", + "rman__surface", + "rman__displacement" + ] + + # Get all material attrs for all look sets to retrieve their inputs + existing_attrs = [] + for look_set in look_sets: + for attr in shader_attrs: + if cmds.attributeQuery(attr, node=look_set, exists=True): + existing_attrs.append("{}.{}".format(look_set, attr)) + + materials = cmds.listConnections(existing_attrs, + source=True, + destination=False) or [] + + self.log.debug("Found materials:\n{}".format(materials)) + + # Get the entire node chain of the look sets + # history = cmds.listHistory(look_sets, allConnections=True) + # if materials list is empty, listHistory() will crash with + # RuntimeError + history = set() + if materials: + history.update(cmds.listHistory(materials, allConnections=True)) + + # Since we retrieved history only of the connected materials connected + # to the look sets above we now add direct history for some of the + # look sets directly handling render attribute sets + + # Maya (at least 2024) crashes with Warning when render set type + # isn't available. cmds.ls() will return empty list + if RENDER_SET_TYPES: + render_sets = cmds.ls(look_sets, type=RENDER_SET_TYPES) + if render_sets: + history.update( + cmds.listHistory(render_sets, + future=False, + pruneDagObjects=True) + or [] + ) + + # Get file nodes in the material history + files = cmds.ls(list(history), + # It's important only node types are passed that + # exist (e.g. for loaded plugins) because otherwise + # the result will turn back empty + type=list(FILE_NODES.keys()), + long=True) + + # Sort for log readability + files.sort() + + return files + def collect_sets(self, instance): """Collect all objectSets which are of importance for publishing @@ -446,7 +451,8 @@ class CollectLook(pyblish.api.InstancePlugin): which need to be Args: - instance (list): all nodes to be published + instance (pyblish.api.Instance): publish instance containing all + nodes to be published. Returns: dict @@ -624,7 +630,7 @@ class CollectLook(pyblish.api.InstancePlugin): "source": source, # required for resources "files": files, "color_space": color_space - } # required for resources + } class CollectModelRenderSets(CollectLook): @@ -639,13 +645,13 @@ class CollectModelRenderSets(CollectLook): families = ["model"] label = "Collect Model Render Sets" hosts = ["maya"] - maketx = True def collect_sets(self, instance): """Collect all related objectSets except shadingEngines Args: - instance (list): all nodes to be published + instance (pyblish.api.Instance): publish instance containing all + nodes to be published. Returns: dict @@ -661,7 +667,7 @@ class CollectModelRenderSets(CollectLook): if objset in sets: continue - if "shadingEngine" in cmds.nodeType(objset, inherited=True): + if cmds.objectType(objset, isAType="shadingEngine"): continue sets[objset] = {"uuid": lib.get_id(objset), "members": list()} diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_render.py b/client/ayon_core/hosts/maya/plugins/publish/collect_render.py index ff959afabc..21095935a2 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_render.py @@ -1,24 +1,19 @@ # -*- coding: utf-8 -*- """Collect render data. -This collector will go through render layers in maya and prepare all data -needed to create instances and their representations for submission and -publishing on farm. 
+This collector will go through renderlayer instances and prepare all data +needed to detect the expected rendered files for a layer, with resolution, +frame ranges and collects the data needed for publishing on the farm. Requires: instance -> families - instance -> setMembers - instance -> folderPath context -> currentFile - context -> workspaceDir context -> user -Optional: - Provides: instance -> label - instance -> productName + instance -> subset instance -> attachTo instance -> setMembers instance -> publish @@ -26,6 +21,8 @@ Provides: instance -> frameEnd instance -> byFrameStep instance -> renderer + instance -> family + instance -> asset instance -> time instance -> author instance -> source @@ -71,8 +68,6 @@ class CollectMayaRender(pyblish.api.InstancePlugin): # TODO: Re-add force enable of workfile instance? # TODO: Re-add legacy layer support with LAYER_ prefix but in Creator - # TODO: Set and collect active state of RenderLayer in Creator using - # renderlayer.isRenderable() context = instance.context layer = instance.data["transientData"]["layer"] @@ -112,7 +107,13 @@ class CollectMayaRender(pyblish.api.InstancePlugin): except UnsupportedRendererException as exc: raise KnownPublishError(exc) render_products = layer_render_products.layer_data.products - assert render_products, "no render products generated" + if not render_products: + self.log.error( + "No render products generated for '%s'. You might not have " + "any render camera in the renderlayer or render end frame is " + "lower than start frame.", + instance.name + ) expected_files = [] multipart = False for product in render_products: @@ -130,16 +131,21 @@ class CollectMayaRender(pyblish.api.InstancePlugin): }) has_cameras = any(product.camera for product in render_products) - assert has_cameras, "No render cameras found." - - self.log.debug("multipart: {}".format( - multipart)) - assert expected_files, "no file names were generated, this is a bug" - self.log.debug( - "expected files: {}".format( - json.dumps(expected_files, indent=4, sort_keys=True) + if render_products and not has_cameras: + self.log.error( + "No render cameras found for: %s", + instance ) - ) + if not expected_files: + self.log.warning( + "No file names were generated, this is a bug.") + + for render_product in render_products: + self.log.debug(render_product) + self.log.debug("multipart: {}".format(multipart)) + self.log.debug("expected files: {}".format( + json.dumps(expected_files, indent=4, sort_keys=True) + )) # if we want to attach render to product, check if we have AOV's # in expectedFiles. If so, raise error as we cannot attach AOV @@ -151,14 +157,14 @@ class CollectMayaRender(pyblish.api.InstancePlugin): ) # append full path - aov_dict = {} image_directory = os.path.join( cmds.workspace(query=True, rootDirectory=True), cmds.workspace(fileRuleEntry="images") ) # replace relative paths with absolute. Render products are # returned as list of dictionaries. - publish_meta_path = None + publish_meta_path = "NOT-SET" + aov_dict = {} for aov in expected_files: full_paths = [] aov_first_key = list(aov.keys())[0] @@ -169,14 +175,6 @@ class CollectMayaRender(pyblish.api.InstancePlugin): publish_meta_path = os.path.dirname(full_path) aov_dict[aov_first_key] = full_paths full_exp_files = [aov_dict] - self.log.debug(full_exp_files) - - if publish_meta_path is None: - raise KnownPublishError("Unable to detect any expected output " - "images for: {}. Make sure you have a " - "renderable camera and a valid frame " - "range set for your renderlayer." 
- "".format(instance.name)) frame_start_render = int(self.get_render_attribute( "startFrame", layer=layer_name)) @@ -222,7 +220,8 @@ class CollectMayaRender(pyblish.api.InstancePlugin): common_publish_meta_path = "/" + common_publish_meta_path self.log.debug( - "Publish meta path: {}".format(common_publish_meta_path)) + "Publish meta path: {}".format(common_publish_meta_path) + ) # Get layer specific settings, might be overrides colorspace_data = lib.get_color_management_preferences() diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py b/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py index c4af2914cd..cb3951ec0c 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py @@ -299,4 +299,10 @@ def transfer_image_planes(source_cameras, target_cameras, def _attach_image_plane(camera, image_plane): cmds.imagePlane(image_plane, edit=True, detach=True) + + # Attaching to a camera resets it to identity size, so we counter that + size_x = cmds.getAttr(f"{image_plane}.sizeX") + size_y = cmds.getAttr(f"{image_plane}.sizeY") cmds.imagePlane(image_plane, edit=True, camera=camera) + cmds.setAttr(f"{image_plane}.sizeX", size_x) + cmds.setAttr(f"{image_plane}.sizeY", size_y) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py b/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py index 19825b769c..4b293b5785 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py @@ -5,7 +5,8 @@ from maya import cmds from ayon_core.pipeline import publish -class ExtractGPUCache(publish.Extractor): +class ExtractGPUCache(publish.Extractor, + publish.OptionalPyblishPluginMixin): """Extract the content of the instance to a GPU cache file.""" label = "GPU Cache" @@ -20,6 +21,9 @@ class ExtractGPUCache(publish.Extractor): useBaseTessellation = True def process(self, instance): + if not self.is_active(instance.data): + return + cmds.loadPlugin("gpuCache", quiet=True) staging_dir = self.staging_dir(instance) diff --git a/client/ayon_core/hosts/maya/plugins/publish/help/validate_animation_out_set_related_node_ids.xml b/client/ayon_core/hosts/maya/plugins/publish/help/validate_animation_out_set_related_node_ids.xml new file mode 100644 index 0000000000..a855dd90a5 --- /dev/null +++ b/client/ayon_core/hosts/maya/plugins/publish/help/validate_animation_out_set_related_node_ids.xml @@ -0,0 +1,29 @@ + + + +Shape IDs mismatch original shape +## Shapes mismatch IDs with original shape + +Meshes are detected where the (deformed) mesh has a different `cbId` than +the same mesh in its deformation history. +Theses should normally be the same. + +### How to repair? + +By using the repair action the IDs from the shape in history will be +copied to the deformed shape. For **animation** instances using the +repair action usually is usually the correct fix. + + + +### How does this happen? + +When a deformer is applied in the scene on a referenced mesh that had no +deformers then Maya will create a new shape node for the mesh that +does not have the original id. Then on scene save new ids get created for the +meshes lacking a `cbId` and thus the mesh then has a different `cbId` than +the mesh in the deformation history. 
+ + + + diff --git a/client/ayon_core/hosts/maya/plugins/publish/help/validate_mesh_non_manifold.xml b/client/ayon_core/hosts/maya/plugins/publish/help/validate_mesh_non_manifold.xml new file mode 100644 index 0000000000..5aec3009a7 --- /dev/null +++ b/client/ayon_core/hosts/maya/plugins/publish/help/validate_mesh_non_manifold.xml @@ -0,0 +1,33 @@ + + + +Non-Manifold Edges/Vertices +## Non-Manifold Edges/Vertices + +Meshes found with non-manifold edges or vertices. + +### How to repair? + +Run select invalid to select the invalid components. + +You can also try the _cleanup matching polygons_ action which will perform a +cleanup like Maya's `Mesh > Cleanup...` modeling tool. + +It is recommended to always select the invalid components first to see where +the issue is, because after running any repair you will need to double-check +that the topology still looks the way you intended. + + + +### What is non-manifold topology? + +_Non-manifold topology_ polygons have a configuration that cannot be unfolded +into a continuous flat piece, for example: + +- Three or more faces share an edge +- Two or more faces share a single vertex but no edge. +- Adjacent faces have opposite normals + + + + diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/client/ayon_core/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py index 2502fd74b2..7ecd602662 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py @@ -6,7 +6,7 @@ from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, - PublishValidationError, + PublishXmlValidationError, OptionalPyblishPluginMixin, get_plugin_settings, apply_plugin_settings_automatically @@ -56,40 +56,39 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin, # if a deformer has been created on the shape invalid = self.get_invalid(instance) if invalid: - # TODO: Message formatting can be improved - raise PublishValidationError("Nodes found with mismatching " - "IDs: {0}".format(invalid), - title="Invalid node ids") + + # Use the short names + invalid = cmds.ls(invalid) + invalid.sort() + + # Construct a human-readable list + invalid = "\n".join("- {}".format(node) for node in invalid) + + raise PublishXmlValidationError( + plugin=self, + message=( + "Nodes have different IDs than their input " + "history: \n{0}".format(invalid) + ) + ) @classmethod def get_invalid(cls, instance): """Get all nodes which do not match the criteria""" invalid = [] - types_to_skip = ["locator"] + types = ["mesh", "nurbsCurve", "nurbsSurface"] # get asset id nodes = instance.data.get("out_hierarchy", instance[:]) - for node in nodes: + for node in cmds.ls(nodes, type=types, long=True): # We only check when the node is *not* referenced if cmds.referenceQuery(node, isNodeReferenced=True): continue - # Check if node is a shape as deformers only work on shapes - obj_type = cmds.objectType(node, isAType="shape") - if not obj_type: - continue - - # Skip specific types - if cmds.objectType(node) in types_to_skip: - continue - # Get the current id of the node node_id = lib.get_id(node) - if not node_id: - invalid.append(node) - continue history_id = lib.get_id_from_sibling(node) if history_id is not None and node_id != history_id: diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_color_sets.py 
b/client/ayon_core/hosts/maya/plugins/publish/validate_color_sets.py index e69717fad0..f70b46f89e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_color_sets.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_color_sets.py @@ -10,7 +10,7 @@ from ayon_core.pipeline.publish import ( ) -class ValidateColorSets(pyblish.api.Validator, +class ValidateColorSets(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate all meshes in the instance have unlocked normals diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py b/client/ayon_core/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py index 4590c53931..045e22545c 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py @@ -1,14 +1,18 @@ +import inspect + import pyblish.api from maya import cmds from ayon_core.pipeline.publish import ( context_plugin_should_run, + PublishValidationError, OptionalPyblishPluginMixin ) + class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin, OptionalPyblishPluginMixin): - """Validate if current render layer has a renderable camera + """Validate if current render layer has a renderable camera. There is a bug in Redshift which occurs when the current render layer at file open has no renderable camera. The error raised is as follows: @@ -32,8 +36,39 @@ class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin, if not context_plugin_should_run(self, context): return - layer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True) + # This validator only makes sense when publishing renderlayer instances + # with Redshift. We skip validation if there isn't any. 
+ if not any(self.is_active_redshift_render_instance(instance) + for instance in context): + return + cameras = cmds.ls(type="camera", long=True) renderable = any(c for c in cameras if cmds.getAttr(c + ".renderable")) - assert renderable, ("Current render layer '%s' has no renderable " - "camera" % layer) + if not renderable: + layer = cmds.editRenderLayerGlobals(query=True, + currentRenderLayer=True) + raise PublishValidationError( + "Current render layer '{}' has no renderable camera".format( + layer + ), + description=inspect.getdoc(self) + ) + + @staticmethod + def is_active_redshift_render_instance(instance) -> bool: + """Return whether instance is an active renderlayer instance set to + render with Redshift renderer.""" + if not instance.data.get("active", True): + return False + + # Check this before families just because it's a faster check + if not instance.data.get("renderer") == "redshift": + return False + + families = set() + families.add(instance.data.get("family")) + families.update(instance.data.get("families", [])) + if "renderlayer" not in families: + return False + + return True diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_in_context.py b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_in_context.py index c5a3b1659d..e6f4b908bb 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_in_context.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_in_context.py @@ -11,8 +11,6 @@ from ayon_core.pipeline.publish import ( OptionalPyblishPluginMixin ) -from maya import cmds - class ValidateInstanceInContext(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): @@ -38,17 +36,20 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin, return folder_path = instance.data.get("folderPath") - context_folder_path = self.get_context_folder_path(instance) - if folder_path != context_folder_path: + task = instance.data.get("task") + context = self.get_context(instance) + if (folder_path, task) != context: + context_label = "{} > {}".format(*context) + instance_label = "{} > {}".format(folder_path, task) raise PublishValidationError( message=( - "Instance '{}' publishes to different folder than current" + "Instance '{}' publishes to different context than current" " context: {}. 
Current context: {}".format( - instance.name, folder_path, context_folder_path + instance.name, instance_label, context_label ) ), description=( - "## Publishing to a different folder\n" + "## Publishing to a different context data\n" "There are publish instances present which are publishing " "into a different folder than your current context.\n\n" "Usually this is not what you want but there can be cases " @@ -64,14 +65,20 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin, @classmethod def repair(cls, instance): - context_folder_path = cls.get_context_folder_path(instance) - instance_node = instance.data["instance_node"] - cmds.setAttr( - "{}.folderPath".format(instance_node), - context_folder_path, - type="string" + context_folder_path, context_task = cls.get_context( + instance) + + create_context = instance.context.data["create_context"] + instance_id = instance.data["instance_id"] + created_instance = create_context.get_instance_by_id( + instance_id ) + created_instance["folderPath"] = context_folder_path + created_instance["task"] = context_task + create_context.save_changes() @staticmethod - def get_context_folder_path(instance): - return instance.context.data["folderPath"] + def get_context(instance): + """Return asset, task from publishing context data""" + context = instance.context + return context.data["folderPath"], context.data["task"] diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py b/client/ayon_core/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py index d8a9222c36..cfd4156124 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py @@ -2,7 +2,6 @@ from maya import cmds import pyblish.api from ayon_core.pipeline.publish import ( - ValidateContentsOrder, RepairContextAction, PublishValidationError ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py b/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py index e70a805de4..070974aef5 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py @@ -47,10 +47,18 @@ class ValidateShadingEngine(pyblish.api.InstancePlugin, shape, destination=True, type="shadingEngine" ) or [] for shading_engine in shading_engines: - name = ( - cmds.listConnections(shading_engine + ".surfaceShader")[0] - + "SG" + materials = cmds.listConnections( + shading_engine + ".surfaceShader", + source=True, destination=False ) + if not materials: + cls.log.warning( + "Shading engine '{}' has no material connected to its " + ".surfaceShader attribute.".format(shading_engine)) + continue + + material = materials[0] # there should only ever be one input + name = material + "SG" if shading_engine != name: invalid.append(shading_engine) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py b/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py index f1c171bddc..47314b64ac 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py @@ -3,7 +3,6 @@ import maya.cmds as cmds import pyblish.api import ayon_core.hosts.maya.api.lib as mayalib -from ayon_core.pipeline.context_tools import get_current_project_folder from ayon_core.pipeline.publish import 
( RepairContextAction, ValidateSceneOrder, @@ -131,6 +130,5 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin, cls.log.debug(current_linear) cls.log.info("Setting time unit to match project") - # TODO replace query with using 'context.data["folderEntity"]' - folder_entity = get_current_project_folder() + folder_entity = context.data["folderEntity"] mayalib.set_scene_fps(folder_entity["attrib"]["fps"]) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py index b6d3dc73fd..58d015e962 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py @@ -10,7 +10,7 @@ from ayon_core.pipeline.publish import ( ) -class ValidateMeshNgons(pyblish.api.Validator, +class ValidateMeshNgons(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure that meshes don't have ngons @@ -45,6 +45,11 @@ class ValidateMeshNgons(pyblish.api.Validator, # Get all faces faces = ['{0}.f[*]'.format(node) for node in meshes] + # Skip meshes that for some reason have no faces, e.g. empty meshes + faces = cmds.ls(faces) + if not faces: + return [] + # Filter to n-sided polygon faces (ngons) invalid = lib.polyConstraint(faces, t=0x0008, # type=face diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py index ff1dca87cf..bf1489f92e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py @@ -16,7 +16,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateMeshNoNegativeScale(pyblish.api.Validator, +class ValidateMeshNoNegativeScale(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure that meshes don't have a negative scale. 
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_non_manifold.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_non_manifold.py index 6dbad538ef..958707e4f4 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_non_manifold.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_non_manifold.py @@ -1,14 +1,99 @@ -from maya import cmds +from maya import cmds, mel import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( ValidateMeshOrder, - PublishValidationError, + PublishXmlValidationError, + RepairAction, OptionalPyblishPluginMixin ) +def poly_cleanup(version=4, + meshes=None, + # Version 1 + all_meshes=False, + select_only=False, + history_on=True, + quads=False, + nsided=False, + concave=False, + holed=False, + nonplanar=False, + zeroGeom=False, + zeroGeomTolerance=1e-05, + zeroEdge=False, + zeroEdgeTolerance=1e-05, + zeroMap=False, + zeroMapTolerance=1e-05, + # Version 2 + shared_uvs=False, + non_manifold=False, + # Version 3 + lamina=False, + # Version 4 + invalid_components=False): + """Wrapper around `polyCleanupArgList` mel command""" + + # Get all inputs named as `dict` to easily do conversions and formatting + values = locals() + + # Convert booleans to 1 or 0 + for key in [ + "all_meshes", + "select_only", + "history_on", + "quads", + "nsided", + "concave", + "holed", + "nonplanar", + "zeroGeom", + "zeroEdge", + "zeroMap", + "shared_uvs", + "non_manifold", + "lamina", + "invalid_components", + ]: + values[key] = 1 if values[key] else 0 + + cmd = ( + 'polyCleanupArgList {version} {{ ' + '"{all_meshes}",' # 0: All selectable meshes + '"{select_only}",' # 1: Only perform a selection + '"{history_on}",' # 2: Keep construction history + '"{quads}",' # 3: Check for quads polys + '"{nsided}",' # 4: Check for n-sides polys + '"{concave}",' # 5: Check for concave polys + '"{holed}",' # 6: Check for holed polys + '"{nonplanar}",' # 7: Check for non-planar polys + '"{zeroGeom}",' # 8: Check for 0 area faces + '"{zeroGeomTolerance}",' # 9: Tolerance for face areas + '"{zeroEdge}",' # 10: Check for 0 length edges + '"{zeroEdgeTolerance}",' # 11: Tolerance for edge length + '"{zeroMap}",' # 12: Check for 0 uv face area + '"{zeroMapTolerance}",' # 13: Tolerance for uv face areas + '"{shared_uvs}",' # 14: Unshare uvs that are shared + # across vertices + '"{non_manifold}",' # 15: Check for nonmanifold polys + '"{lamina}",' # 16: Check for lamina polys + '"{invalid_components}"' # 17: Remove invalid components + ' }};'.format(**values) + ) + + mel.eval("source polyCleanupArgList") + if not all_meshes and meshes: + # Allow to specify meshes to run over by selecting them + cmds.select(meshes, replace=True) + mel.eval(cmd) + + +class CleanupMatchingPolygons(RepairAction): + label = "Cleanup matching polygons" + + def _as_report_list(values, prefix="- ", suffix="\n"): """Return list as bullet point list for a report""" if not values: @@ -16,7 +101,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateMeshNonManifold(pyblish.api.Validator, +class ValidateMeshNonManifold(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure that meshes don't have non-manifold edges or vertices @@ -29,7 +114,8 @@ class ValidateMeshNonManifold(pyblish.api.Validator, hosts = ['maya'] families = ['model'] label = 'Mesh Non-Manifold Edges/Vertices' - actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + actions = 
[ayon_core.hosts.maya.api.action.SelectInvalidAction, + CleanupMatchingPolygons] optional = True @staticmethod @@ -39,9 +125,11 @@ class ValidateMeshNonManifold(pyblish.api.Validator, invalid = [] for mesh in meshes: - if (cmds.polyInfo(mesh, nonManifoldVertices=True) or - cmds.polyInfo(mesh, nonManifoldEdges=True)): - invalid.append(mesh) + components = cmds.polyInfo(mesh, + nonManifoldVertices=True, + nonManifoldEdges=True) + if components: + invalid.extend(components) return invalid @@ -49,12 +137,34 @@ class ValidateMeshNonManifold(pyblish.api.Validator, """Process all the nodes in the instance 'objectSet'""" if not self.is_active(instance.data): return + invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError( - "Meshes found with non-manifold edges/vertices:\n\n{0}".format( - _as_report_list(sorted(invalid)) - ), - title="Non-Manifold Edges/Vertices" + # Report only the meshes instead of all component indices + invalid_meshes = { + component.split(".", 1)[0] for component in invalid + } + invalid_meshes = _as_report_list(sorted(invalid_meshes)) + + raise PublishXmlValidationError( + plugin=self, + message=( + "Meshes found with non-manifold " + "edges/vertices:\n\n{0}".format(invalid_meshes) + ) ) + + @classmethod + def repair(cls, instance): + invalid_components = cls.get_invalid(instance) + if not invalid_components: + cls.log.info("No invalid components found to cleanup.") + return + + invalid_meshes = { + component.split(".", 1)[0] for component in invalid_components + } + poly_cleanup(meshes=list(invalid_meshes), + select_only=True, + non_manifold=True) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py index 1790a94580..76b716d01f 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py @@ -18,7 +18,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateMeshNormalsUnlocked(pyblish.api.Validator, +class ValidateMeshNormalsUnlocked(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate all meshes in the instance have unlocked normals diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py index a139b65169..305a58d78e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py @@ -1,3 +1,5 @@ +import inspect + from maya import cmds import pyblish.api @@ -29,8 +31,8 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction, RepairAction] - @staticmethod - def get_invalid(instance): + @classmethod + def get_invalid(cls, instance): meshes = cmds.ls(instance, type='mesh', long=True) @@ -40,6 +42,11 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, # Get existing mapping of uv sets by index indices = cmds.polyUVSet(mesh, query=True, allUVSetsIndices=True) maps = cmds.polyUVSet(mesh, query=True, allUVSets=True) + if not indices or not maps: + cls.log.warning("Mesh has no UV set: %s", mesh) + invalid.append(mesh) + continue + mapping = dict(zip(indices, maps)) # Get the uv set at index zero. 
@@ -56,8 +63,14 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, invalid = self.get_invalid(instance) if invalid: + + invalid_list = "\n".join(f"- {node}" for node in invalid) + raise PublishValidationError( - "Meshes found without 'map1' UV set: {0}".format(invalid)) + "Meshes found without 'map1' UV set:\n" + "{0}".format(invalid_list), + description=self.get_description() + ) @classmethod def repair(cls, instance): @@ -68,6 +81,12 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, # Get existing mapping of uv sets by index indices = cmds.polyUVSet(mesh, query=True, allUVSetsIndices=True) maps = cmds.polyUVSet(mesh, query=True, allUVSets=True) + if not indices or not maps: + # No UV set exist at all, create a `map1` uv set + # This may fail silently if the mesh has no geometry at all + cmds.polyUVSet(mesh, create=True, uvSet="map1") + continue + mapping = dict(zip(indices, maps)) # Ensure there is no uv set named map1 to avoid @@ -97,3 +116,23 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, rename=True, uvSet=original, newUVSet="map1") + + @staticmethod + def get_description(): + return inspect.cleandoc("""### Mesh found without map1 uv set + + A mesh must have a default UV set named `map1` to adhere to the default + mesh behavior of Maya meshes. + + There may be meshes that: + - Have no UV set + - Have no `map1` uv set but are using a different name + - Have a `map1` uv set, but it's not the default (first index) + + + #### Repair + + Using repair will try to make the first UV set the `map1` uv set. If no + UV set exists at all a new `map1` uv set is created, otherwise the + current first UV set is renamed to `map1`. + """) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py b/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py index 37c78a72ee..bbc644c3db 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py @@ -1,3 +1,5 @@ +import inspect + from maya import cmds import pyblish.api @@ -14,8 +16,7 @@ class ValidateModelContent(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Adheres to the content of 'model' product type - - Must have one top group. (configurable) - - Must only contain: transforms, meshes and groups + See `get_description` for more details. """ @@ -28,13 +29,16 @@ class ValidateModelContent(pyblish.api.InstancePlugin, validate_top_group = True optional = False + allowed = ('mesh', 'transform', 'nurbsCurve', 'nurbsSurface', 'locator') + @classmethod def get_invalid(cls, instance): content_instance = instance.data.get("setMembers", None) if not content_instance: - cls.log.error("Instance has no nodes!") - return [instance.data["name"]] + cls.log.error("Model instance has no nodes. 
" + "It is not allowed to be empty") + return [instance.data["instance_node"]] # All children will be included in the extracted export so we also # validate *all* descendents of the set members and we skip any @@ -46,30 +50,42 @@ class ValidateModelContent(pyblish.api.InstancePlugin, content_instance = list(set(content_instance + descendants)) # Ensure only valid node types - allowed = ('mesh', 'transform', 'nurbsCurve', 'nurbsSurface', 'locator') nodes = cmds.ls(content_instance, long=True) - valid = cmds.ls(content_instance, long=True, type=allowed) + valid = cmds.ls(content_instance, long=True, type=cls.allowed) invalid = set(nodes) - set(valid) if invalid: - cls.log.error("These nodes are not allowed: %s" % invalid) + # List as bullet points + invalid_bullets = "\n".join(f"- {node}" for node in invalid) + + cls.log.error( + "These nodes are not allowed:\n{}\n\n" + "The valid node types are: {}".format( + invalid_bullets, ", ".join(cls.allowed)) + ) return list(invalid) if not valid: - cls.log.error("No valid nodes in the instance") - return True + cls.log.error( + "No valid nodes in the model instance.\n" + "The valid node types are: {}".format(", ".join(cls.allowed)) + ) + return [instance.data["instance_node"]] # Ensure it has shapes shapes = cmds.ls(valid, long=True, shapes=True) if not shapes: cls.log.error("No shapes in the model instance") - return True + return [instance.data["instance_node"]] - # Top group - top_parents = set([x.split("|")[1] for x in content_instance]) + # Ensure single top group + top_parents = {"|" + x.split("|", 2)[1] for x in content_instance} if cls.validate_top_group and len(top_parents) != 1: - cls.log.error("Must have exactly one top group") - return top_parents + cls.log.error( + "A model instance must have exactly one top group. " + "Found top groups: {}".format(", ".join(top_parents)) + ) + return list(top_parents) def _is_visible(node): """Return whether node is visible""" @@ -101,5 +117,21 @@ class ValidateModelContent(pyblish.api.InstancePlugin, if invalid: raise PublishValidationError( title="Model content is invalid", - message="See log for more details" + message="Model content is invalid. See log for more details.", + description=self.get_description() ) + + @classmethod + def get_description(cls): + return inspect.cleandoc(f""" + ### Model content is invalid + + Your model instance does not adhere to the rules of a + model product type: + + - Must have at least one visible shape in it, like a mesh. + - Must have one root node. When exporting multiple meshes they + must be inside a group. + - May only contain the following node types: + {", ".join(cls.allowed)} + """) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_no_animation.py b/client/ayon_core/hosts/maya/plugins/publish/validate_no_animation.py index 6e0719628f..bf45c0e974 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_no_animation.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_no_animation.py @@ -16,7 +16,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateNoAnimation(pyblish.api.Validator, +class ValidateNoAnimation(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure no keyframes on nodes in the Instance. 
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_no_null_transforms.py b/client/ayon_core/hosts/maya/plugins/publish/validate_no_null_transforms.py index a9dc1d5bef..38955fd777 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_no_null_transforms.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_no_null_transforms.py @@ -19,22 +19,17 @@ def _as_report_list(values, prefix="- ", suffix="\n"): def has_shape_children(node): # Check if any descendants - allDescendents = cmds.listRelatives(node, - allDescendents=True, - fullPath=True) - if not allDescendents: + all_descendents = cmds.listRelatives(node, + allDescendents=True, + fullPath=True) + if not all_descendents: return False # Check if there are any shapes at all - shapes = cmds.ls(allDescendents, shapes=True) + shapes = cmds.ls(all_descendents, shapes=True, noIntermediate=True) if not shapes: return False - # Check if all descendent shapes are intermediateObjects; - # if so we consider this node a null node and return False. - if all(cmds.getAttr('{0}.intermediateObject'.format(x)) for x in shapes): - return False - return True diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids.py index ba748a4fc4..2d6f231cb5 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids.py @@ -60,7 +60,8 @@ class ValidateNodeIDs(pyblish.api.InstancePlugin): # We do want to check the referenced nodes as it might be # part of the end product. id_nodes = lib.get_id_required_nodes(referenced_nodes=True, - nodes=instance[:]) - invalid = [n for n in id_nodes if not lib.get_id(n)] - - return invalid + nodes=instance[:], + # Exclude those with already + # existing ids + existing_ids=False) + return id_nodes diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py index 5ca9690fd7..d679c510af 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py @@ -37,27 +37,27 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( - ("Found folder ids which are not related to " - "current project in instance: `{}`").format(instance.name)) + "Found folder ids which are not related to " + "current project in instance: `{}`".format(instance.name)) @classmethod def get_invalid(cls, instance): - invalid = [] + nodes = instance[:] + if not nodes: + return # Get all id required nodes - id_required_nodes = lib.get_id_required_nodes(referenced_nodes=True, - nodes=instance[:]) + id_required_nodes = lib.get_id_required_nodes(referenced_nodes=False, + nodes=nodes) + if not id_required_nodes: + return # check ids against database ids - project_name = instance.context.data["projectName"] - folder_entities = ayon_api.get_folders(project_name, fields={"id"}) - folder_ids = { - folder_entity["id"] - for folder_entity in folder_entities - } + folder_ids = cls.get_project_folder_ids(context=instance.context) # Get all asset IDs + invalid = [] for node in id_required_nodes: cb_id = lib.get_id(node) @@ -71,3 +71,31 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin): invalid.append(node) return invalid + + @classmethod + def get_project_folder_ids(cls, 
context): + """Return all folder ids in the current project. + + Arguments: + context (pyblish.api.Context): The publish context. + + Returns: + set[str]: All folder ids in the current project. + + """ + # We query the database only for the first instance instead of + # per instance by storing a cache in the context + key = "__cache_project_folder_ids" + if key in context.data: + return context.data[key] + + # check ids against database + project_name = context.data["projectName"] + folder_entities = ayon_api.get_folders(project_name, fields={"id"}) + folder_ids = { + folder_entity["id"] + for folder_entity in folder_entities + } + + context.data[key] = folder_ids + return folder_ids diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_related.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_related.py index 992988dc7d..17eb58f421 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_related.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_related.py @@ -1,17 +1,27 @@ +import inspect +import uuid +from collections import defaultdict import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( OptionalPyblishPluginMixin, PublishValidationError, ValidatePipelineOrder) +from ayon_api import get_folders + + +def is_valid_uuid(value) -> bool: + """Return whether value is a valid UUID""" + try: + uuid.UUID(value) + except ValueError: + return False + return True class ValidateNodeIDsRelated(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): - """Validate nodes have a related Colorbleed Id to the - instance.data[folderPath] - - """ + """Validate nodes have a related `cbId` to the instance.data[folderPath]""" order = ValidatePipelineOrder label = 'Node Ids Related (ID)' @@ -39,21 +49,24 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin, # Ensure all nodes have a cbId invalid = self.get_invalid(instance) if invalid: + + invalid_list = "\n".join(f"- {node}" for node in sorted(invalid)) + raise PublishValidationError(( - "Nodes IDs found that are not related to folder '{}' : {}" - ).format( - instance.data["folderPath"], invalid - )) + "Nodes IDs found that are not related to folder '{}':\n{}" + ).format(instance.data["folderPath"], invalid_list), + description=self.get_description() + ) @classmethod def get_invalid(cls, instance): """Return the member nodes that are invalid""" - invalid = list() - folder_id = instance.data["folderEntity"]["id"] - # We do want to check the referenced nodes as we it might be + # We do want to check the referenced nodes as it might be # part of the end product + invalid = list() + nodes_by_other_folder_ids = defaultdict(set) for node in instance: _id = lib.get_id(node) if not _id: @@ -62,5 +75,48 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin, node_folder_id = _id.split(":", 1)[0] if node_folder_id != folder_id: invalid.append(node) + nodes_by_other_folder_ids[node_folder_id].add(node) + + # Log what other assets were found. 
+ if nodes_by_other_folder_ids: + project_name = instance.context.data["projectName"] + other_folder_ids = set(nodes_by_other_folder_ids.keys()) + + # Remove folder ids that are not valid UUID identifiers, these + # may be legacy OpenPype ids + other_folder_ids = {folder_id for folder_id in other_folder_ids + if is_valid_uuid(folder_id)} + if not other_folder_ids: + return invalid + + folder_entities = get_folders(project_name=project_name, + folder_ids=other_folder_ids, + fields=["path"]) + if folder_entities: + # Log names of other assets detected + # We disregard logging nodes/ids for asset ids where no asset + # was found in the database because ValidateNodeIdsInDatabase + # takes care of that. + folder_paths = {entity["path"] for entity in folder_entities} + cls.log.error( + "Found nodes related to other folders:\n{}".format( + "\n".join(f"- {path}" for path in sorted(folder_paths)) + ) + ) return invalid + + @staticmethod + def get_description(): + return inspect.cleandoc("""### Node IDs must match folder id + + The node ids must match the folder entity id you are publishing to. + + Usually this mismatch occurs if you are re-using nodes from another + folder or project. + + #### How to repair? + + The repair action will regenerate new ids for + the invalid nodes to match the instance's folder. + """) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_unique.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_unique.py index f4994922ce..6b44a307d2 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_unique.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_unique.py @@ -8,6 +8,8 @@ from ayon_core.pipeline.publish import ( import ayon_core.hosts.maya.api.action from ayon_core.hosts.maya.api import lib +from maya import cmds + class ValidateNodeIdsUnique(pyblish.api.InstancePlugin): """Validate the nodes in the instance have a unique Colorbleed Id @@ -41,7 +43,7 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin): if invalid: label = "Nodes found with non-unique folder ids" raise PublishValidationError( - message="{}: {}".format(label, invalid), + message="{}, see log".format(label), title="Non-unique folder ids on nodes", description="{}\n- {}".format(label, "\n- ".join(sorted(invalid))) @@ -54,7 +56,6 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin): # Check only non intermediate shapes # todo: must the instance itself ensure to have no intermediates? # todo: how come there are intermediates? 
- from maya import cmds instance_members = cmds.ls(instance, noIntermediate=True, long=True) # Collect each id with their members @@ -67,10 +68,14 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin): # Take only the ids with more than one member invalid = list() - _iteritems = getattr(ids, "iteritems", ids.items) - for _ids, members in _iteritems(): + for members in ids.values(): if len(members) > 1: - cls.log.error("ID found on multiple nodes: '%s'" % members) + members_text = "\n".join( + "- {}".format(member) for member in sorted(members) + ) + cls.log.error( + "ID found on multiple nodes:\n{}".format(members_text) + ) invalid.extend(members) return invalid diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py b/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py index 0171318813..e186d74b89 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py @@ -1,4 +1,5 @@ import re +import inspect import pyblish.api from maya import cmds @@ -36,7 +37,10 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin, return invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError("Invalid cameras for render.") + raise PublishValidationError( + "Invalid render cameras.", + description=self.get_description() + ) @classmethod def get_invalid(cls, instance): @@ -51,17 +55,30 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin, RenderSettings.get_image_prefix_attr(renderer) ) - + renderlayer = instance.data["renderlayer"] if len(cameras) > 1: if re.search(cls.R_CAMERA_TOKEN, file_prefix): # if there is token in prefix and we have more then # 1 camera, all is ok. return - cls.log.error("Multiple renderable cameras found for %s: %s " % - (instance.data["setMembers"], cameras)) - return [instance.data["setMembers"]] + cameras + cls.log.error( + "Multiple renderable cameras found for %s: %s ", + renderlayer, ", ".join(cameras)) + return [renderlayer] + cameras elif len(cameras) < 1: - cls.log.error("No renderable cameras found for %s " % - instance.data["setMembers"]) - return [instance.data["setMembers"]] + cls.log.error("No renderable cameras found for %s ", renderlayer) + return [renderlayer] + + def get_description(self): + return inspect.cleandoc( + """### Render Cameras Invalid + + Your render cameras are misconfigured. You may have no render + camera set or have multiple cameras with a render filename + prefix that does not include the `<Camera>` token. + + See the logs for more details about the cameras. 
+ + """ + ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_resolution.py b/client/ayon_core/hosts/maya/plugins/publish/validate_resolution.py index 1e5a9a944c..d822dca288 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_resolution.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_resolution.py @@ -84,19 +84,11 @@ class ValidateResolution(pyblish.api.InstancePlugin, @classmethod def get_folder_resolution(cls, instance): - folder_attributes = instance.data["folderEntity"]["attrib"] - if ( - "resolutionWidth" in folder_attributes - and "resolutionHeight" in folder_attributes - and "pixelAspect" in folder_attributes - ): - width = folder_attributes["resolutionWidth"] - height = folder_attributes["resolutionHeight"] - pixelAspect = folder_attributes["pixelAspect"] - return int(width), int(height), float(pixelAspect) - - # Defaults if not found in asset document or project document - return 1920, 1080, 1.0 + task_attributes = instance.data["taskEntity"]["attrib"] + width = task_attributes["resolutionWidth"] + height = task_attributes["resolutionHeight"] + pixel_aspect = task_attributes["pixelAspect"] + return int(width), int(height), float(pixel_aspect) @classmethod def repair(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_scene_set_workspace.py b/client/ayon_core/hosts/maya/plugins/publish/validate_scene_set_workspace.py index 6e68cf5d14..c7d5de2050 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_scene_set_workspace.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_scene_set_workspace.py @@ -46,6 +46,6 @@ class ValidateSceneSetWorkspace(pyblish.api.ContextPlugin): raise PublishValidationError( "Maya workspace is not set correctly.\n\n" f"Current workfile `{scene_name}` is not inside the " - "current Maya project root directory `{root_dir}`.\n\n" + f"current Maya project root directory `{root_dir}`.\n\n" "Please use Workfile app to re-save." ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_render_stats.py b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_render_stats.py index 2783a6dbe8..52ce3c5436 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_render_stats.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_render_stats.py @@ -6,11 +6,12 @@ import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( RepairAction, ValidateMeshOrder, + PublishValidationError, OptionalPyblishPluginMixin ) -class ValidateShapeRenderStats(pyblish.api.Validator, +class ValidateShapeRenderStats(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure all render stats are set to the default values.""" @@ -20,7 +21,6 @@ class ValidateShapeRenderStats(pyblish.api.Validator, label = 'Shape Default Render Stats' actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction, RepairAction] - optional = True defaults = {'castsShadows': 1, 'receiveShadows': 1, @@ -37,14 +37,13 @@ class ValidateShapeRenderStats(pyblish.api.Validator, # It seems the "surfaceShape" and those derived from it have # `renderStat` attributes. 
shapes = cmds.ls(instance, long=True, type='surfaceShape') - invalid = [] + invalid = set() for shape in shapes: - _iteritems = getattr(cls.defaults, "iteritems", cls.defaults.items) - for attr, default_value in _iteritems(): + for attr, default_value in cls.defaults.items(): if cmds.attributeQuery(attr, node=shape, exists=True): value = cmds.getAttr('{}.{}'.format(shape, attr)) if value != default_value: - invalid.append(shape) + invalid.add(shape) return invalid @@ -52,17 +51,36 @@ class ValidateShapeRenderStats(pyblish.api.Validator, if not self.is_active(instance.data): return invalid = self.get_invalid(instance) + if not invalid: + return - if invalid: - raise ValueError("Shapes with non-default renderStats " - "found: {0}".format(invalid)) + defaults_str = "\n".join( + "- {}: {}\n".format(key, value) + for key, value in self.defaults.items() + ) + description = ( + "## Shape Default Render Stats\n" + "Shapes are detected with non-default render stats.\n\n" + "To ensure a model's shapes behave like a shape would by default " + "we require the render stats to have not been altered in " + "the published models.\n\n" + "### How to repair?\n" + "You can reset the default values on the shapes by using the " + "repair action." + ) + + raise PublishValidationError( + "Shapes with non-default renderStats " + "found: {0}".format(", ".join(sorted(invalid))), + description=description, + detail="The expected default values " + "are:\n\n{}".format(defaults_str) + ) @classmethod def repair(cls, instance): for shape in cls.get_invalid(instance): - _iteritems = getattr(cls.defaults, "iteritems", cls.defaults.items) - for attr, default_value in _iteritems(): - + for attr, default_value in cls.defaults.items(): if cmds.attributeQuery(attr, node=shape, exists=True): plug = '{0}.{1}'.format(shape, attr) value = cmds.getAttr(plug) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_zero.py b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_zero.py index 4f4826776c..6c89258085 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_zero.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_zero.py @@ -12,7 +12,7 @@ from ayon_core.pipeline.publish import ( ) -class ValidateShapeZero(pyblish.api.Validator, +class ValidateShapeZero(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Shape components may not have any "tweak" values diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_transform_zero.py b/client/ayon_core/hosts/maya/plugins/publish/validate_transform_zero.py index 1cbdd05b0b..cd96ebb10d 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_transform_zero.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_transform_zero.py @@ -1,5 +1,6 @@ -from maya import cmds +import inspect +from maya import cmds import pyblish.api import ayon_core.hosts.maya.api.action @@ -10,7 +11,7 @@ from ayon_core.pipeline.publish import ( ) -class ValidateTransformZero(pyblish.api.Validator, +class ValidateTransformZero(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Transforms can't have any values @@ -57,7 +58,7 @@ class ValidateTransformZero(pyblish.api.Validator, if ('_LOC' in transform) or ('_loc' in transform): continue mat = cmds.xform(transform, q=1, matrix=True, objectSpace=True) - if not all(abs(x-y) < cls._tolerance + if not all(abs(x - y) < cls._tolerance for x, y in zip(cls._identity, mat)): invalid.append(transform) @@ -69,14 +70,24 @@ class ValidateTransformZero(pyblish.api.Validator, return 
 invalid = self.get_invalid(instance)
 if invalid:
- names = "\n".join(
 " - {}".format(node) for node in invalid
 )
 raise PublishValidationError(
 title="Transform Zero",
+ description=self.get_description(),
 message="The model publish allows no transformations. You must"
 " freeze transformations to continue.\n\n"
- "Nodes found with transform values: "
+ "Nodes found with transform values:\n"
 "{0}".format(names))
+
+ @staticmethod
+ def get_description():
+ return inspect.cleandoc("""### Transform can't have any values
+
+ The model publish allows no transformations.
+
+ You must **freeze transformations** to continue.
+
+ """)
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_unique_names.py b/client/ayon_core/hosts/maya/plugins/publish/validate_unique_names.py
index 72c3c7dc72..0066d70531 100644
--- a/client/ayon_core/hosts/maya/plugins/publish/validate_unique_names.py
+++ b/client/ayon_core/hosts/maya/plugins/publish/validate_unique_names.py
@@ -9,7 +9,7 @@ from ayon_core.pipeline.publish import (
 )
-class ValidateUniqueNames(pyblish.api.Validator,
+class ValidateUniqueNames(pyblish.api.InstancePlugin,
 OptionalPyblishPluginMixin):
 """transform names should be unique
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py
index aa229875fe..77e189e37b 100644
--- a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py
+++ b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py
@@ -10,7 +10,7 @@ from ayon_core.pipeline.publish import (
 )
-class ValidateYetiRigInputShapesInInstance(pyblish.api.Validator,
+class ValidateYetiRigInputShapesInInstance(pyblish.api.InstancePlugin,
 OptionalPyblishPluginMixin):
 """Validate if all input nodes are part of the instance's hierarchy"""
diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py
index 75c82164c2..ad43a24385 100644
--- a/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py
+++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py
@@ -49,8 +49,9 @@ def get_selected_nodes():
 """Get information from current selection"""
 selection = cmds.ls(selection=True, long=True)
- hierarchy = lib.get_all_children(selection)
- return list(set(selection + hierarchy))
+ hierarchy = lib.get_all_children(selection,
+ ignore_intermediate_objects=True)
+ return list(hierarchy.union(selection))
 def get_all_asset_nodes():
diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py
index 74cdbeb7d4..88ef4b201a 100644
--- a/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py
+++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py
@@ -51,7 +51,7 @@ def assign_vrayproxy_shaders(vrayproxy, assignments):
 index += 1
-def vrayproxy_assign_look(vrayproxy, product_name="lookDefault"):
+def vrayproxy_assign_look(vrayproxy, product_name="lookMain"):
 # type: (str, str) -> None
 """Assign look to vray proxy.
diff --git a/client/ayon_core/vendor/python/common/capture.py b/client/ayon_core/hosts/maya/vendor/python/capture.py similarity index 99% rename from client/ayon_core/vendor/python/common/capture.py rename to client/ayon_core/hosts/maya/vendor/python/capture.py index 224699f916..4ccfdb35f3 100644 --- a/client/ayon_core/vendor/python/common/capture.py +++ b/client/ayon_core/hosts/maya/vendor/python/capture.py @@ -12,11 +12,7 @@ import logging from maya import cmds from maya import mel -try: - from PySide2 import QtGui, QtWidgets -except ImportError: - from PySide import QtGui - QtWidgets = QtGui +from qtpy import QtGui, QtWidgets version_info = (2, 3, 0) @@ -873,7 +869,11 @@ def _get_screen_size(): if _in_standalone(): return [0, 0] - rect = QtWidgets.QDesktopWidget().screenGeometry(-1) + try: + rect = QtWidgets.QDesktopWidget().screenGeometry(-1) + except AttributeError: + # in Qt6 it is a different call + rect = QtWidgets.QApplication.primaryScreen().availableGeometry() return [rect.width(), rect.height()] diff --git a/client/ayon_core/hosts/nuke/api/lib.py b/client/ayon_core/hosts/nuke/api/lib.py index 4fcba8d2d4..78cbe85097 100644 --- a/client/ayon_core/hosts/nuke/api/lib.py +++ b/client/ayon_core/hosts/nuke/api/lib.py @@ -2627,11 +2627,11 @@ class NukeDirmap(HostDirmap): class DirmapCache: - """Caching class to get settings and sync_module easily and only once.""" + """Caching class to get settings and sitesync easily and only once.""" _project_name = None _project_settings = None - _sync_module_discovered = False - _sync_module = None + _sitesync_addon_discovered = False + _sitesync_addon = None _mapping = None @classmethod @@ -2647,11 +2647,11 @@ class DirmapCache: return cls._project_settings @classmethod - def sync_module(cls): - if not cls._sync_module_discovered: - cls._sync_module_discovered = True - cls._sync_module = AddonsManager().get("sync_server") - return cls._sync_module + def sitesync_addon(cls): + if not cls._sitesync_addon_discovered: + cls._sitesync_addon_discovered = True + cls._sitesync_addon = AddonsManager().get("sitesync") + return cls._sitesync_addon @classmethod def mapping(cls): @@ -2673,7 +2673,7 @@ def dirmap_file_name_filter(file_name): "nuke", DirmapCache.project_name(), DirmapCache.project_settings(), - DirmapCache.sync_module(), + DirmapCache.sitesync_addon(), ) if not DirmapCache.mapping(): DirmapCache.set_mapping(dirmap_processor.get_mappings()) diff --git a/client/ayon_core/hosts/nuke/api/plugin.py b/client/ayon_core/hosts/nuke/api/plugin.py index d9cae934d7..5b97fab0c2 100644 --- a/client/ayon_core/hosts/nuke/api/plugin.py +++ b/client/ayon_core/hosts/nuke/api/plugin.py @@ -5,7 +5,7 @@ import sys import six import random import string -from collections import OrderedDict, defaultdict +from collections import defaultdict from ayon_core.settings import get_current_project_settings from ayon_core.lib import ( @@ -904,7 +904,7 @@ class ExporterReviewMov(ExporterReview): node, product_name, "Reposition node... 
`{}`" ) # append reformatted tag - add_tags.append("reformated") + add_tags.append("reformatted") # only create colorspace baking if toggled on if bake_viewer_process: diff --git a/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py b/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py index 2f6d121af5..afef3ba843 100644 --- a/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py +++ b/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook +from ayon_applications import PreLaunchHook class PrelaunchNukeAssistHook(PreLaunchHook): diff --git a/client/ayon_core/hosts/nuke/plugins/load/actions.py b/client/ayon_core/hosts/nuke/plugins/load/actions.py index a1b3697ef1..53cb03087b 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/actions.py +++ b/client/ayon_core/hosts/nuke/plugins/load/actions.py @@ -19,7 +19,7 @@ class SetFrameRangeLoader(load.LoaderPlugin): "yeticache", "pointcache", } - representations = ["*"] + representations = {"*"} extensions = {"*"} label = "Set frame range" @@ -53,7 +53,7 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): "yeticache", "pointcache", } - representations = ["*"] + representations = {"*"} label = "Set frame range (with handles)" order = 12 diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_backdrop.py b/client/ayon_core/hosts/nuke/plugins/load/load_backdrop.py index e48dbf5e2f..7d823919dc 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_backdrop.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_backdrop.py @@ -22,7 +22,7 @@ class LoadBackdropNodes(load.LoaderPlugin): """Loading Published Backdrop nodes (workfile, nukenodes)""" product_types = {"workfile", "nukenodes"} - representations = ["*"] + representations = {"*"} extensions = {"nk"} label = "Import Nuke Nodes" diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_camera_abc.py b/client/ayon_core/hosts/nuke/plugins/load/load_camera_abc.py index 70b736b1c8..14c54c3adc 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_camera_abc.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_camera_abc.py @@ -21,7 +21,7 @@ class AlembicCameraLoader(load.LoaderPlugin): """ product_types = {"camera"} - representations = ["*"] + representations = {"*"} extensions = {"abc"} label = "Load Alembic Camera" diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_clip.py b/client/ayon_core/hosts/nuke/plugins/load/load_clip.py index 8a41d854d9..df8f2ab018 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_clip.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_clip.py @@ -42,7 +42,7 @@ class LoadClip(plugin.NukeLoader): "prerender", "review", } - representations = ["*"] + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) ) @@ -130,6 +130,18 @@ class LoadClip(plugin.NukeLoader): first = 1 last = first + duration + # If a slate is present, the frame range is 1 frame longer for movies, + # but file sequences its the first frame that is 1 frame lower. + slate_frames = repre_entity["data"].get("slateFrames", 0) + extension = "." 
+ repre_entity["context"]["ext"] + + if extension in VIDEO_EXTENSIONS: + last += slate_frames + + files_count = len(repre_entity["files"]) + if extension in IMAGE_EXTENSIONS and files_count != 1: + first -= slate_frames + # Fallback to folder name when namespace is None if namespace is None: namespace = context["folder"]["name"] @@ -167,7 +179,9 @@ class LoadClip(plugin.NukeLoader): repre_entity ) - self._set_range_to_node(read_node, first, last, start_at_workfile) + self._set_range_to_node( + read_node, first, last, start_at_workfile, slate_frames + ) version_name = version_entity["version"] if version_name < 0: @@ -402,14 +416,21 @@ class LoadClip(plugin.NukeLoader): for member in members: nuke.delete(member) - def _set_range_to_node(self, read_node, first, last, start_at_workfile): + def _set_range_to_node( + self, read_node, first, last, start_at_workfile, slate_frames=0 + ): read_node['origfirst'].setValue(int(first)) read_node['first'].setValue(int(first)) read_node['origlast'].setValue(int(last)) read_node['last'].setValue(int(last)) # set start frame depending on workfile or version - self._loader_shift(read_node, start_at_workfile) + if start_at_workfile: + read_node['frame_mode'].setValue("start at") + + start_frame = self.script_start - slate_frames + + read_node['frame'].setValue(str(start_frame)) def _make_retimes(self, parent_node, version_data): ''' Create all retime and timewarping nodes with copied animation ''' @@ -466,18 +487,6 @@ class LoadClip(plugin.NukeLoader): for i, n in enumerate(dependent_nodes): last_node.setInput(i, n) - def _loader_shift(self, read_node, workfile_start=False): - """ Set start frame of read node to a workfile start - - Args: - read_node (nuke.Node): The nuke's read node - workfile_start (bool): set workfile start frame if true - - """ - if workfile_start: - read_node['frame_mode'].setValue("start at") - read_node['frame'].setValue(str(self.script_start)) - def _get_node_name(self, context): folder_entity = context["folder"] product_name = context["product"]["name"] diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_effects.py b/client/ayon_core/hosts/nuke/plugins/load/load_effects.py index f17b179d1b..a87c81295a 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_effects.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_effects.py @@ -19,7 +19,7 @@ class LoadEffects(load.LoaderPlugin): """Loading colorspace soft effect exported from nukestudio""" product_types = {"effect"} - representations = ["*"] + representations = {"*"} extensions = {"json"} label = "Load Effects - nodes" diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_effects_ip.py b/client/ayon_core/hosts/nuke/plugins/load/load_effects_ip.py index 6b58977a95..8fa1347598 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_effects_ip.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_effects_ip.py @@ -20,7 +20,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin): """Loading colorspace soft effect exported from nukestudio""" product_types = {"effect"} - representations = ["*"] + representations = {"*"} extensions = {"json"} label = "Load Effects - Input Process" diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_gizmo.py b/client/ayon_core/hosts/nuke/plugins/load/load_gizmo.py index 6709648ffb..95f85bacfc 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_gizmo.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_gizmo.py @@ -22,7 +22,7 @@ class LoadGizmo(load.LoaderPlugin): """Loading nuke Gizmo""" product_types = {"gizmo"} - 
representations = ["*"] + representations = {"*"} extensions = {"nk"} label = "Load Gizmo" diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_gizmo_ip.py b/client/ayon_core/hosts/nuke/plugins/load/load_gizmo_ip.py index 3017fa5fc9..3112e27811 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_gizmo_ip.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_gizmo_ip.py @@ -24,7 +24,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin): """Loading colorspace soft effect exported from nukestudio""" product_types = {"gizmo"} - representations = ["*"] + representations = {"*"} extensions = {"nk"} label = "Load Gizmo - Input Process" diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_image.py b/client/ayon_core/hosts/nuke/plugins/load/load_image.py index 9d3f9ceea0..d825b621fc 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_image.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_image.py @@ -32,7 +32,7 @@ class LoadImage(load.LoaderPlugin): "review", "image", } - representations = ["*"] + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS ) diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_matchmove.py b/client/ayon_core/hosts/nuke/plugins/load/load_matchmove.py index 73c21376b4..beebd0458f 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_matchmove.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_matchmove.py @@ -8,7 +8,7 @@ class MatchmoveLoader(load.LoaderPlugin): """ product_types = {"matchmove"} - representations = ["*"] + representations = {"*"} extensions = {"py"} defaults = ["Camera", "Object"] diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_model.py b/client/ayon_core/hosts/nuke/plugins/load/load_model.py index 971c36b6cf..0326e0a4fc 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_model.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_model.py @@ -19,7 +19,7 @@ class AlembicModelLoader(load.LoaderPlugin): """ product_types = {"model", "pointcache", "animation"} - representations = ["*"] + representations = {"*"} extensions = {"abc"} label = "Load Alembic" diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_ociolook.py b/client/ayon_core/hosts/nuke/plugins/load/load_ociolook.py index 82addcdfc0..c369030b65 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_ociolook.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_ociolook.py @@ -21,7 +21,7 @@ class LoadOcioLookNodes(load.LoaderPlugin): """Loading Ocio look to the nuke.Node graph""" product_types = {"ociolook"} - representations = ["*"] + representations = {"*"} extensions = {"json"} label = "Load OcioLook [nodes]" diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_script_precomp.py b/client/ayon_core/hosts/nuke/plugins/load/load_script_precomp.py index ccc4164355..3e554f9d3b 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_script_precomp.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_script_precomp.py @@ -17,7 +17,7 @@ class LinkAsGroup(load.LoaderPlugin): """Copy the published file to be pasted at the desired location""" product_types = {"workfile", "nukenodes"} - representations = ["*"] + representations = {"*"} extensions = {"nk"} label = "Load Precomp" diff --git a/client/ayon_core/hosts/nuke/plugins/publish/extract_script_save.py b/client/ayon_core/hosts/nuke/plugins/publish/extract_script_save.py index e44e5686b6..d325684a7c 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/extract_script_save.py +++ 
b/client/ayon_core/hosts/nuke/plugins/publish/extract_script_save.py @@ -2,10 +2,10 @@ import nuke import pyblish.api -class ExtractScriptSave(pyblish.api.Extractor): +class ExtractScriptSave(pyblish.api.InstancePlugin): """Save current Nuke workfile script""" label = 'Script Save' - order = pyblish.api.Extractor.order - 0.1 + order = pyblish.api.ExtractorOrder - 0.1 hosts = ['nuke'] def process(self, instance): diff --git a/client/ayon_core/hosts/nuke/plugins/publish/extract_slate_frame.py b/client/ayon_core/hosts/nuke/plugins/publish/extract_slate_frame.py index c013da84d2..627888ac92 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -300,6 +300,10 @@ class ExtractSlateFrame(publish.Extractor): self.log.debug( "__ matching_repre: {}".format(pformat(matching_repre))) + data = matching_repre.get("data", {}) + data["slateFrames"] = 1 + matching_repre["data"] = data + self.log.info("Added slate frame to representation files") def add_comment_slate_node(self, instance, node): diff --git a/client/ayon_core/hosts/photoshop/api/README.md b/client/ayon_core/hosts/photoshop/api/README.md index c936c1ec1f..b391131a42 100644 --- a/client/ayon_core/hosts/photoshop/api/README.md +++ b/client/ayon_core/hosts/photoshop/api/README.md @@ -207,7 +207,7 @@ class ImageLoader(load.LoaderPlugin): """ families = ["image"] - representations = ["*"] + representations = {"*"} def load(self, context, name=None, namespace=None, data=None): path = self.filepath_from_context(context) diff --git a/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py b/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py index 8358c11ca1..70f8fc730f 100644 --- a/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py +++ b/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py @@ -6,10 +6,7 @@ from ayon_core.lib import ( get_ayon_launcher_args, is_using_ayon_console, ) -from ayon_core.lib.applications import ( - PreLaunchHook, - LaunchTypes, -) +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.photoshop import get_launch_script_path diff --git a/client/ayon_core/hosts/photoshop/plugins/load/load_image.py b/client/ayon_core/hosts/photoshop/plugins/load/load_image.py index 72df2706b5..d71067615e 100644 --- a/client/ayon_core/hosts/photoshop/plugins/load/load_image.py +++ b/client/ayon_core/hosts/photoshop/plugins/load/load_image.py @@ -12,7 +12,7 @@ class ImageLoader(photoshop.PhotoshopLoader): """ product_types = {"image", "render"} - representations = ["*"] + representations = {"*"} def load(self, context, name=None, namespace=None, data=None): stub = self.get_stub() diff --git a/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py b/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py index 73e8c3683c..dd14543f3e 100644 --- a/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py +++ b/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py @@ -25,7 +25,7 @@ class ImageFromSequenceLoader(photoshop.PhotoshopLoader): """ product_types = {"render"} - representations = ["*"] + representations = {"*"} options = [] def load(self, context, name=None, namespace=None, data=None): diff --git a/client/ayon_core/hosts/photoshop/plugins/load/load_reference.py b/client/ayon_core/hosts/photoshop/plugins/load/load_reference.py index 7cd34690f7..b563faff82 100644 --- 
a/client/ayon_core/hosts/photoshop/plugins/load/load_reference.py +++ b/client/ayon_core/hosts/photoshop/plugins/load/load_reference.py @@ -15,7 +15,7 @@ class ReferenceLoader(photoshop.PhotoshopLoader): """ product_types = {"image", "render"} - representations = ["*"] + representations = {"*"} def load(self, context, name=None, namespace=None, data=None): stub = self.get_stub() diff --git a/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py b/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py index d82651289c..cf9953bfe9 100644 --- a/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py +++ b/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class PreLaunchResolveLastWorkfile(PreLaunchHook): diff --git a/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py b/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py index c14fd75b2f..f45e28d5ab 100644 --- a/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py +++ b/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py @@ -1,7 +1,7 @@ import os from pathlib import Path import platform -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.resolve.utils import setup diff --git a/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py b/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py index ab16053450..300564f7cc 100644 --- a/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py +++ b/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py @@ -1,6 +1,6 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes import ayon_core.hosts.resolve diff --git a/client/ayon_core/hosts/resolve/plugins/load/load_clip.py b/client/ayon_core/hosts/resolve/plugins/load/load_clip.py index c7bced5e8e..2ce1c43957 100644 --- a/client/ayon_core/hosts/resolve/plugins/load/load_clip.py +++ b/client/ayon_core/hosts/resolve/plugins/load/load_clip.py @@ -20,7 +20,7 @@ class LoadClip(plugin.TimelineItemLoader): product_types = {"render2d", "source", "plate", "render", "review"} - representations = ["*"] + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) ) diff --git a/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py b/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py index f204ff7728..f46afadb5a 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py +++ b/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py @@ -144,7 +144,8 @@ class CreateTextures(Creator): 9: "512", 10: "1024", 11: "2048", - 12: "4096" + 12: "4096", + 13: "8192" }, default=None, label="Size"), diff --git a/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py b/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py index f2254c0907..01cb65dd5c 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py +++ b/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py @@ -18,7 +18,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): """Load mesh for project""" product_types = {"*"} - representations = ["abc", "fbx", "obj", "gltf"] + representations = {"abc", "fbx", "obj", "gltf", 
"usd", "usda", "usdc"} label = "Load mesh" order = -10 diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py index 843729786c..4057aee9a6 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py @@ -402,7 +402,7 @@ or updating already created. Publishing will create OTIO file. ): continue - instance = self._make_product_instance( + self._make_product_instance( otio_clip, product_type_preset, deepcopy(base_instance_data), diff --git a/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py b/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py index 25e324c5cc..691b81e089 100644 --- a/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py +++ b/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py @@ -1,5 +1,5 @@ from ayon_core.lib import get_ayon_launcher_args -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class TvpaintPrelaunchHook(PreLaunchHook): diff --git a/client/ayon_core/hosts/tvpaint/plugins/load/load_image.py b/client/ayon_core/hosts/tvpaint/plugins/load/load_image.py index e954b72f12..aad8f92d26 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/load/load_image.py +++ b/client/ayon_core/hosts/tvpaint/plugins/load/load_image.py @@ -7,7 +7,7 @@ class ImportImage(plugin.Loader): """Load image or image sequence to TVPaint as new layer.""" product_types = {"render", "image", "background", "plate", "review"} - representations = ["*"] + representations = {"*"} label = "Import Image" order = 1 diff --git a/client/ayon_core/hosts/tvpaint/plugins/load/load_reference_image.py b/client/ayon_core/hosts/tvpaint/plugins/load/load_reference_image.py index ec671b0cb1..a7fcb9f4a4 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/load/load_reference_image.py +++ b/client/ayon_core/hosts/tvpaint/plugins/load/load_reference_image.py @@ -18,7 +18,7 @@ class LoadImage(plugin.Loader): """Load image or image sequence to TVPaint as new layer.""" product_types = {"render", "image", "background", "plate", "review"} - representations = ["*"] + representations = {"*"} label = "Load Image" order = 1 diff --git a/client/ayon_core/hosts/tvpaint/plugins/load/load_sound.py b/client/ayon_core/hosts/tvpaint/plugins/load/load_sound.py index d8c6a7a430..7e8c8022d6 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/load/load_sound.py +++ b/client/ayon_core/hosts/tvpaint/plugins/load/load_sound.py @@ -23,7 +23,7 @@ class ImportSound(plugin.Loader): """ product_types = {"audio", "review", "plate"} - representations = ["*"] + representations = {"*"} label = "Import Sound" order = 1 diff --git a/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py b/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py index 8663dac997..07c2d91533 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py +++ b/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py @@ -1,6 +1,5 @@ import os -from ayon_core.lib import StringTemplate from ayon_core.pipeline import ( registered_host, get_current_context, @@ -25,7 +24,7 @@ class LoadWorkfile(plugin.Loader): """Load workfile.""" product_types = {"workfile"} - representations = ["tvpp"] + representations = {"tvpp"} label = "Load Workfile" @@ -111,8 +110,6 @@ class LoadWorkfile(plugin.Loader): data["version"] = version - filename = 
StringTemplate.format_strict_template( - file_template, data - ) + filename = work_template["file"].format_strict(data) path = os.path.join(work_root, filename) host.save_workfile(path) diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py b/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py index ab30e3dc10..fe5e148b7b 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -25,8 +25,9 @@ from ayon_core.hosts.tvpaint.lib import ( ) -class ExtractSequence(pyblish.api.Extractor): +class ExtractSequence(pyblish.api.InstancePlugin): label = "Extract Sequence" + order = pyblish.api.ExtractorOrder hosts = ["tvpaint"] families = ["review", "render"] diff --git a/client/ayon_core/hosts/unreal/api/__init__.py b/client/ayon_core/hosts/unreal/api/__init__.py index ac6a91eae9..7e7f839f27 100644 --- a/client/ayon_core/hosts/unreal/api/__init__.py +++ b/client/ayon_core/hosts/unreal/api/__init__.py @@ -28,9 +28,11 @@ from .pipeline import ( ) __all__ = [ + "UnrealActorCreator", + "UnrealAssetCreator", + "Loader", "install", "uninstall", - "Loader", "ls", "publish", "containerise", diff --git a/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py b/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py index 54ffba3a63..e38591f65d 100644 --- a/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py +++ b/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py @@ -9,7 +9,7 @@ from pathlib import Path from qtpy import QtCore from ayon_core import resources -from ayon_core.lib.applications import ( +from ayon_applications import ( PreLaunchHook, ApplicationLaunchFailed, LaunchTypes, diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_alembic_animation.py b/client/ayon_core/hosts/unreal/plugins/load/load_alembic_animation.py index 64d684939c..a12f4f41b4 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_alembic_animation.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_alembic_animation.py @@ -16,7 +16,7 @@ class AnimationAlembicLoader(plugin.Loader): product_types = {"animation"} label = "Import Alembic Animation" - representations = ["abc"] + representations = {"abc"} icon = "cube" color = "orange" diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_animation.py b/client/ayon_core/hosts/unreal/plugins/load/load_animation.py index 0f51ac39e0..f6a612ce53 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_animation.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_animation.py @@ -8,7 +8,7 @@ from unreal import EditorAssetLibrary from unreal import MovieSceneSkeletalAnimationTrack from unreal import MovieSceneSkeletalAnimationSection -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.pipeline import ( get_representation_path, AYON_CONTAINER_ID @@ -22,7 +22,7 @@ class AnimationFBXLoader(plugin.Loader): product_types = {"animation"} label = "Import FBX Animation" - representations = ["fbx"] + representations = {"fbx"} icon = "cube" color = "orange" @@ -53,7 +53,7 @@ class AnimationFBXLoader(plugin.Loader): if not actor: return None - folder_entity = get_current_project_folder(fields=["attrib.fps"]) + folder_entity = get_current_folder_entity(fields=["attrib.fps"]) task.set_editor_property('filename', path) task.set_editor_property('destination_path', asset_dir) @@ -256,7 
+256,7 @@ class AnimationFBXLoader(plugin.Loader): repre_entity = context["representation"] folder_name = container["asset_name"] source_path = get_representation_path(repre_entity) - folder_entity = get_current_project_folder(fields=["attrib.fps"]) + folder_entity = get_current_folder_entity(fields=["attrib.fps"]) destination_path = container["namespace"] task = unreal.AssetImportTask() diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_camera.py b/client/ayon_core/hosts/unreal/plugins/load/load_camera.py index 285834c911..681c83c6a1 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_camera.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_camera.py @@ -30,7 +30,7 @@ class CameraLoader(plugin.Loader): product_types = {"camera"} label = "Load Camera" - representations = ["fbx"] + representations = {"fbx"} icon = "cube" color = "orange" diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_geometrycache_abc.py b/client/ayon_core/hosts/unreal/plugins/load/load_geometrycache_abc.py index 44c308069b..ae7d41192a 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_geometrycache_abc.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_geometrycache_abc.py @@ -21,7 +21,7 @@ class PointCacheAlembicLoader(plugin.Loader): product_types = {"model", "pointcache"} label = "Import Alembic Point Cache" - representations = ["abc"] + representations = {"abc"} icon = "cube" color = "orange" diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_layout.py b/client/ayon_core/hosts/unreal/plugins/load/load_layout.py index 6c01925453..49d95c6459 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_layout.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_layout.py @@ -25,7 +25,7 @@ from ayon_core.pipeline import ( AYON_CONTAINER_ID, get_current_project_name, ) -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.settings import get_current_project_settings from ayon_core.hosts.unreal.api import plugin from ayon_core.hosts.unreal.api.pipeline import ( @@ -41,7 +41,7 @@ class LayoutLoader(plugin.Loader): """Load Layout from a JSON file""" product_types = {"layout"} - representations = ["json"] + representations = {"json"} label = "Load Layout" icon = "code-fork" @@ -169,7 +169,7 @@ class LayoutLoader(plugin.Loader): anim_path = f"{asset_dir}/animations/{anim_file_name}" - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() # Import animation task = unreal.AssetImportTask() task.options = unreal.FbxImportUI() diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_layout_existing.py b/client/ayon_core/hosts/unreal/plugins/load/load_layout_existing.py index 56e36f6185..f9d438367b 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_layout_existing.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_layout_existing.py @@ -22,7 +22,7 @@ class ExistingLayoutLoader(plugin.Loader): """ product_types = {"layout"} - representations = ["json"] + representations = {"json"} label = "Load Layout on Existing Scene" icon = "code-fork" diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_abc.py b/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_abc.py index 64b1810080..dfc5d58708 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_abc.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_abc.py @@ -20,7 +20,7 @@ class 
SkeletalMeshAlembicLoader(plugin.Loader): product_types = {"pointcache", "skeletalMesh"} label = "Import Alembic Skeletal Mesh" - representations = ["abc"] + representations = {"abc"} icon = "cube" color = "orange" diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py b/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py index f61f0dbc3f..513404ab98 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py @@ -20,7 +20,7 @@ class SkeletalMeshFBXLoader(plugin.Loader): product_types = {"rig", "skeletalMesh"} label = "Import FBX Skeletal Mesh" - representations = ["fbx"] + representations = {"fbx"} icon = "cube" color = "orange" diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_abc.py b/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_abc.py index 256cb9e8bc..0bf6ce9eaa 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_abc.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_abc.py @@ -20,7 +20,7 @@ class StaticMeshAlembicLoader(plugin.Loader): product_types = {"model", "staticMesh"} label = "Import Alembic Static Mesh" - representations = ["abc"] + representations = {"abc"} icon = "cube" color = "orange" diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_fbx.py b/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_fbx.py index 0ec4b1b4f8..b7bb57ac23 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_fbx.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_fbx.py @@ -20,7 +20,7 @@ class StaticMeshFBXLoader(plugin.Loader): product_types = {"model", "staticMesh"} label = "Import FBX Static Mesh" - representations = ["fbx"] + representations = {"fbx"} icon = "cube" color = "orange" diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_uasset.py b/client/ayon_core/hosts/unreal/plugins/load/load_uasset.py index 89ef357c89..63f23ecc11 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_uasset.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_uasset.py @@ -17,7 +17,7 @@ class UAssetLoader(plugin.Loader): product_types = {"uasset"} label = "Load UAsset" - representations = ["uasset"] + representations = {"uasset"} icon = "cube" color = "orange" @@ -166,6 +166,6 @@ class UMapLoader(UAssetLoader): product_types = {"uasset"} label = "Load Level" - representations = ["umap"] + representations = {"umap"} extension = "umap" diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_yeticache.py b/client/ayon_core/hosts/unreal/plugins/load/load_yeticache.py index 21715a24c6..708fc83745 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_yeticache.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_yeticache.py @@ -17,7 +17,7 @@ class YetiLoader(plugin.Loader): product_types = {"yeticacheUE"} label = "Import Yeti" - representations = ["abc"] + representations = {"abc"} icon = "pagelines" color = "orange" diff --git a/client/ayon_core/lib/__init__.py b/client/ayon_core/lib/__init__.py index 2ee7eecfe3..408262ca42 100644 --- a/client/ayon_core/lib/__init__.py +++ b/client/ayon_core/lib/__init__.py @@ -120,22 +120,6 @@ from .transcoding import ( get_rescaled_command_arguments, ) -from .applications import ( - ApplicationLaunchFailed, - ApplictionExecutableNotFound, - ApplicationNotFound, - ApplicationManager, - - PreLaunchHook, - PostLaunchHook, - - EnvironmentPrepData, - prepare_app_environments, - 
prepare_context_environments, - get_app_environments_for_context, - apply_project_environments_value -) - from .plugin_tools import ( prepare_template_data, source_hash, @@ -231,18 +215,6 @@ __all__ = [ "convert_ffprobe_fps_to_float", "get_rescaled_command_arguments", - "ApplicationLaunchFailed", - "ApplictionExecutableNotFound", - "ApplicationNotFound", - "ApplicationManager", - "PreLaunchHook", - "PostLaunchHook", - "EnvironmentPrepData", - "prepare_app_environments", - "prepare_context_environments", - "get_app_environments_for_context", - "apply_project_environments_value", - "compile_list_of_regexes", "filter_profiles", diff --git a/client/ayon_core/lib/applications.py b/client/ayon_core/lib/applications.py deleted file mode 100644 index 2db32cbfaa..0000000000 --- a/client/ayon_core/lib/applications.py +++ /dev/null @@ -1,1893 +0,0 @@ -import os -import sys -import copy -import json -import tempfile -import platform -import collections -import inspect -import subprocess -from abc import ABCMeta, abstractmethod - -import six - -from ayon_core import AYON_CORE_ROOT -from ayon_core.settings import get_project_settings, get_studio_settings -from .log import Logger -from .profiles_filtering import filter_profiles -from .local_settings import get_ayon_username - -from .python_module_tools import ( - modules_from_path, - classes_from_module -) -from .execute import ( - find_executable, - get_linux_launcher_args -) - -_logger = None - -PLATFORM_NAMES = {"windows", "linux", "darwin"} -DEFAULT_ENV_SUBGROUP = "standard" -CUSTOM_LAUNCH_APP_GROUPS = { - "djvview" -} - - -class LaunchTypes: - """Launch types are filters for pre/post-launch hooks. - - Please use these variables in case they'll change values. - """ - - # Local launch - application is launched on local machine - local = "local" - # Farm render job - application is on farm - farm_render = "farm-render" - # Farm publish job - integration post-render job - farm_publish = "farm-publish" - # Remote launch - application is launched on remote machine from which - # can be started publishing - remote = "remote" - # Automated launch - application is launched with automated publishing - automated = "automated" - - -def parse_environments(env_data, env_group=None, platform_name=None): - """Parse environment values from settings byt group and platform. - - Data may contain up to 2 hierarchical levels of dictionaries. At the end - of the last level must be string or list. List is joined using platform - specific joiner (';' for windows and ':' for linux and mac). - - Hierarchical levels can contain keys for subgroups and platform name. - Platform specific values must be always last level of dictionary. Platform - names are "windows" (MS Windows), "linux" (any linux distribution) and - "darwin" (any MacOS distribution). - - Subgroups are helpers added mainly for standard and on farm usage. Farm - may require different environments for e.g. licence related values or - plugins. Default subgroup is "standard". 
- - Examples: - ``` - { - # Unchanged value - "ENV_KEY1": "value", - # Empty values are kept (unset environment variable) - "ENV_KEY2": "", - - # Join list values with ':' or ';' - "ENV_KEY3": ["value1", "value2"], - - # Environment groups - "ENV_KEY4": { - "standard": "DEMO_SERVER_URL", - "farm": "LICENCE_SERVER_URL" - }, - - # Platform specific (and only for windows and mac) - "ENV_KEY5": { - "windows": "windows value", - "darwin": ["value 1", "value 2"] - }, - - # Environment groups and platform combination - "ENV_KEY6": { - "farm": "FARM_VALUE", - "standard": { - "windows": ["value1", "value2"], - "linux": "value1", - "darwin": "" - } - } - } - ``` - """ - output = {} - if not env_data: - return output - - if not env_group: - env_group = DEFAULT_ENV_SUBGROUP - - if not platform_name: - platform_name = platform.system().lower() - - for key, value in env_data.items(): - if isinstance(value, dict): - # Look if any key is platform key - # - expect that represents environment group if does not contain - # platform keys - if not PLATFORM_NAMES.intersection(set(value.keys())): - # Skip the key if group is not available - if env_group not in value: - continue - value = value[env_group] - - # Check again if value is dictionary - # - this time there should be only platform keys - if isinstance(value, dict): - value = value.get(platform_name) - - # Check if value is list and join it's values - # QUESTION Should empty values be skipped? - if isinstance(value, (list, tuple)): - value = os.pathsep.join(value) - - # Set key to output if value is string - if isinstance(value, six.string_types): - output[key] = value - return output - - -def get_logger(): - """Global lib.applications logger getter.""" - global _logger - if _logger is None: - _logger = Logger.get_logger(__name__) - return _logger - - -class ApplicationNotFound(Exception): - """Application was not found in ApplicationManager by name.""" - - def __init__(self, app_name): - self.app_name = app_name - super(ApplicationNotFound, self).__init__( - "Application \"{}\" was not found.".format(app_name) - ) - - -class ApplictionExecutableNotFound(Exception): - """Defined executable paths are not available on the machine.""" - - def __init__(self, application): - self.application = application - details = None - if not application.executables: - msg = ( - "Executable paths for application \"{}\"({}) are not set." - ) - else: - msg = ( - "Defined executable paths for application \"{}\"({})" - " are not available on this machine." - ) - details = "Defined paths:" - for executable in application.executables: - details += "\n- " + executable.executable_path - - self.msg = msg.format(application.full_label, application.full_name) - self.details = details - - exc_mgs = str(self.msg) - if details: - # Is good idea to pass new line symbol to exception message? - exc_mgs += "\n" + details - self.exc_msg = exc_mgs - super(ApplictionExecutableNotFound, self).__init__(exc_mgs) - - -class ApplicationLaunchFailed(Exception): - """Application launch failed due to known reason. - - Message should be self explanatory as traceback won't be shown. - """ - pass - - -class ApplicationGroup: - """Hold information about application group. - - Application group wraps different versions(variants) of application. - e.g. "maya" is group and "maya_2020" is variant. - - Group hold `host_name` which is implementation name used in AYON. Also - holds `enabled` if whole app group is enabled or `icon` for application - icon path in resources. 
- - Group has also `environment` which hold same environments for all variants. - - Args: - name (str): Groups' name. - data (dict): Group defying data loaded from settings. - manager (ApplicationManager): Manager that created the group. - """ - - def __init__(self, name, data, manager): - self.name = name - self.manager = manager - self._data = data - - self.enabled = data["enabled"] - self.label = data["label"] or None - self.icon = data["icon"] or None - env = {} - try: - env = json.loads(data["environment"]) - except Exception: - pass - self._environment = env - - host_name = data["host_name"] or None - self.is_host = host_name is not None - self.host_name = host_name - - settings_variants = data["variants"] - variants = {} - for variant_data in settings_variants: - app_variant = Application(variant_data, self) - variants[app_variant.name] = app_variant - - self.variants = variants - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.name) - - def __iter__(self): - for variant in self.variants.values(): - yield variant - - @property - def environment(self): - return copy.deepcopy(self._environment) - - -class Application: - """Hold information about application. - - Object by itself does nothing special. - - Args: - data (dict): Data for the version containing information about - executables, variant label or if is enabled. - Only required key is `executables`. - group (ApplicationGroup): App group object that created the application - and under which application belongs. - - """ - def __init__(self, data, group): - self._data = data - name = data["name"] - label = data["label"] or name - enabled = False - if group.enabled: - enabled = data.get("enabled", True) - - if group.label: - full_label = " ".join((group.label, label)) - else: - full_label = label - env = {} - try: - env = json.loads(data["environment"]) - except Exception: - pass - - arguments = data["arguments"] - if isinstance(arguments, dict): - arguments = arguments.get(platform.system().lower()) - - if not arguments: - arguments = [] - - _executables = data["executables"].get(platform.system().lower(), []) - executables = [ - ApplicationExecutable(executable) - for executable in _executables - ] - - self.group = group - - self.name = name - self.label = label - self.enabled = enabled - self.use_python_2 = data.get("use_python_2", False) - - self.full_name = "/".join((group.name, name)) - self.full_label = full_label - self.arguments = arguments - self.executables = executables - self._environment = env - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.full_name) - - @property - def environment(self): - return copy.deepcopy(self._environment) - - @property - def manager(self): - return self.group.manager - - @property - def host_name(self): - return self.group.host_name - - @property - def icon(self): - return self.group.icon - - @property - def is_host(self): - return self.group.is_host - - def find_executable(self): - """Try to find existing executable for application. - - Returns (str): Path to executable from `executables` or None if any - exists. - """ - for executable in self.executables: - if executable.exists(): - return executable - return None - - def launch(self, *args, **kwargs): - """Launch the application. - - For this purpose is used manager's launch method to keep logic at one - place. - - Arguments must match with manager's launch method. That's why *args - **kwargs are used. 
- - Returns: - subprocess.Popen: Return executed process as Popen object. - """ - return self.manager.launch(self.full_name, *args, **kwargs) - - -class ApplicationManager: - """Load applications and tools and store them by their full name. - - Args: - studio_settings (dict): Preloaded studio settings. When passed manager - will always use these values. Gives ability to create manager - using different settings. - """ - - def __init__(self, studio_settings=None): - self.log = Logger.get_logger(self.__class__.__name__) - - self.app_groups = {} - self.applications = {} - self.tool_groups = {} - self.tools = {} - - self._studio_settings = studio_settings - - self.refresh() - - def set_studio_settings(self, studio_settings): - """Ability to change init system settings. - - This will trigger refresh of manager. - """ - self._studio_settings = studio_settings - - self.refresh() - - def refresh(self): - """Refresh applications from settings.""" - self.app_groups.clear() - self.applications.clear() - self.tool_groups.clear() - self.tools.clear() - - if self._studio_settings is not None: - settings = copy.deepcopy(self._studio_settings) - else: - settings = get_studio_settings( - clear_metadata=False, exclude_locals=False - ) - - applications_addon_settings = settings["applications"] - - # Prepare known applications - app_defs = applications_addon_settings["applications"] - additional_apps = app_defs.pop("additional_apps") - for additional_app in additional_apps: - app_name = additional_app.pop("name") - if app_name in app_defs: - self.log.warning(( - "Additional application '{}' is already" - " in built-in applications." - ).format(app_name)) - app_defs[app_name] = additional_app - - for group_name, variant_defs in app_defs.items(): - group = ApplicationGroup(group_name, variant_defs, self) - self.app_groups[group_name] = group - for app in group: - self.applications[app.full_name] = app - - tools_definitions = applications_addon_settings["tool_groups"] - for tool_group_data in tools_definitions: - group = EnvironmentToolGroup(tool_group_data, self) - self.tool_groups[group.name] = group - for tool in group: - self.tools[tool.full_name] = tool - - def find_latest_available_variant_for_group(self, group_name): - group = self.app_groups.get(group_name) - if group is None or not group.enabled: - return None - - output = None - for _, variant in reversed(sorted(group.variants.items())): - executable = variant.find_executable() - if executable: - output = variant - break - return output - - def create_launch_context(self, app_name, **data): - """Prepare launch context for application. - - Args: - app_name (str): Name of application that should be launched. - **data (Any): Any additional data. Data may be used during - - Returns: - ApplicationLaunchContext: Launch context for application. - - Raises: - ApplicationNotFound: Application was not found by entered name. - """ - - app = self.applications.get(app_name) - if not app: - raise ApplicationNotFound(app_name) - - executable = app.find_executable() - - return ApplicationLaunchContext( - app, executable, **data - ) - - def launch_with_context(self, launch_context): - """Launch application using existing launch context. - - Args: - launch_context (ApplicationLaunchContext): Prepared launch - context. - """ - - if not launch_context.executable: - raise ApplictionExecutableNotFound(launch_context.application) - return launch_context.launch() - - def launch(self, app_name, **data): - """Launch procedure. 
- - For host application it's expected to contain "project_name", - "folder_path" and "task_name". - - Args: - app_name (str): Name of application that should be launched. - **data (dict): Any additional data. Data may be used during - preparation to store objects usable in multiple places. - - Raises: - ApplicationNotFound: Application was not found by entered - argument `app_name`. - ApplictionExecutableNotFound: Executables in application definition - were not found on this machine. - ApplicationLaunchFailed: Something important for application launch - failed. Exception should contain explanation message, - traceback should not be needed. - """ - - context = self.create_launch_context(app_name, **data) - return self.launch_with_context(context) - - - -class EnvironmentToolGroup: - """Hold information about environment tool group. - - Environment tool group may hold different variants of same tool and set - environments that are same for all of them. - - e.g. "mtoa" may have different versions but all environments except one - are same. - - Args: - data (dict): Group information with variants. - manager (ApplicationManager): Manager that creates the group. - """ - - def __init__(self, data, manager): - name = data["name"] - label = data["label"] - - self.name = name - self.label = label - self._data = data - self.manager = manager - - environment = {} - try: - environment = json.loads(data["environment"]) - except Exception: - pass - self._environment = environment - - variants = data.get("variants") or [] - variants_by_name = {} - for variant_data in variants: - tool = EnvironmentTool(variant_data, self) - variants_by_name[tool.name] = tool - self.variants = variants_by_name - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.name) - - def __iter__(self): - for variant in self.variants.values(): - yield variant - - @property - def environment(self): - return copy.deepcopy(self._environment) - - -class EnvironmentTool: - """Hold information about application tool. - - Structure of tool information. - - Args: - variant_data (dict): Variant data with environments and - host and app variant filters. - group (EnvironmentToolGroup): Name of group which wraps tool. - """ - - def __init__(self, variant_data, group): - # Backwards compatibility 3.9.1 - 3.9.2 - # - 'variant_data' contained only environments but contain also host - # and application variant filters - name = variant_data["name"] - label = variant_data["label"] - host_names = variant_data["host_names"] - app_variants = variant_data["app_variants"] - - environment = {} - try: - environment = json.loads(variant_data["environment"]) - except Exception: - pass - - self.host_names = host_names - self.app_variants = app_variants - self.name = name - self.variant_label = label - self.label = " ".join((group.label, label)) - self.group = group - - self._environment = environment - self.full_name = "/".join((group.name, name)) - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.full_name) - - @property - def environment(self): - return copy.deepcopy(self._environment) - - def is_valid_for_app(self, app): - """Is tool valid for application. - - Args: - app (Application): Application for which are prepared environments. 
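# A sketch of the tool variant data parsed above, with hypothetical values.
# The tool full name is "<group name>/<variant name>" and its label combines
# the group label with the variant label; empty filter lists mean the tool
# is not restricted to specific hosts or application variants.
import json

group_name, group_label = "mtoa", "Arnold"
variant_data = {
    "name": "5-3-1",
    "label": "5.3.1",
    "host_names": ["maya"],
    "app_variants": [],
    "environment": '{"MTOA_VERSION": "5.3.1"}',
}

tool_env = json.loads(variant_data["environment"])
full_name = "/".join((group_name, variant_data["name"]))   # "mtoa/5-3-1"
label = " ".join((group_label, variant_data["label"]))     # "Arnold 5.3.1"
print(full_name, label, tool_env)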
- """ - if self.app_variants and app.full_name not in self.app_variants: - return False - - if self.host_names and app.host_name not in self.host_names: - return False - return True - - -class ApplicationExecutable: - """Representation of executable loaded from settings.""" - - def __init__(self, executable): - # Try to format executable with environments - try: - executable = executable.format(**os.environ) - except Exception: - pass - - # On MacOS check if exists path to executable when ends with `.app` - # - it is common that path will lead to "/Applications/Blender" but - # real path is "/Applications/Blender.app" - if platform.system().lower() == "darwin": - executable = self.macos_executable_prep(executable) - - self.executable_path = executable - - def __str__(self): - return self.executable_path - - def __repr__(self): - return "<{}> {}".format(self.__class__.__name__, self.executable_path) - - @staticmethod - def macos_executable_prep(executable): - """Try to find full path to executable file. - - Real executable is stored in '*.app/Contents/MacOS/'. - - Having path to '*.app' gives ability to read it's plist info and - use "CFBundleExecutable" key from plist to know what is "executable." - - Plist is stored in '*.app/Contents/Info.plist'. - - This is because some '*.app' directories don't have same permissions - as real executable. - """ - # Try to find if there is `.app` file - if not os.path.exists(executable): - _executable = executable + ".app" - if os.path.exists(_executable): - executable = _executable - - # Try to find real executable if executable has `Contents` subfolder - contents_dir = os.path.join(executable, "Contents") - if os.path.exists(contents_dir): - executable_filename = None - # Load plist file and check for bundle executable - plist_filepath = os.path.join(contents_dir, "Info.plist") - if os.path.exists(plist_filepath): - import plistlib - - if hasattr(plistlib, "load"): - with open(plist_filepath, "rb") as stream: - parsed_plist = plistlib.load(stream) - else: - parsed_plist = plistlib.readPlist(plist_filepath) - executable_filename = parsed_plist.get("CFBundleExecutable") - - if executable_filename: - executable = os.path.join( - contents_dir, "MacOS", executable_filename - ) - - return executable - - def as_args(self): - return [self.executable_path] - - def _realpath(self): - """Check if path is valid executable path.""" - # Check for executable in PATH - result = find_executable(self.executable_path) - if result is not None: - return result - - # This is not 100% validation but it is better than remove ability to - # launch .bat, .sh or extentionless files - if os.path.exists(self.executable_path): - return self.executable_path - return None - - def exists(self): - if not self.executable_path: - return False - return bool(self._realpath()) - - -class UndefinedApplicationExecutable(ApplicationExecutable): - """Some applications do not require executable path from settings. - - In that case this class is used to "fake" existing executable. - """ - def __init__(self): - pass - - def __str__(self): - return self.__class__.__name__ - - def __repr__(self): - return "<{}>".format(self.__class__.__name__) - - def as_args(self): - return [] - - def exists(self): - return True - - -@six.add_metaclass(ABCMeta) -class LaunchHook: - """Abstract base class of launch hook.""" - # Order of prelaunch hook, will be executed as last if set to None. - order = None - # List of host implementations, skipped if empty. 
- hosts = set() - # Set of application groups - app_groups = set() - # Set of specific application names - app_names = set() - # Set of platform availability - platforms = set() - # Set of launch types for which is available - # - if empty then is available for all launch types - # - by default has 'local' which is most common reason for launc hooks - launch_types = {LaunchTypes.local} - - def __init__(self, launch_context): - """Constructor of launch hook. - - Always should be called - """ - self.log = Logger.get_logger(self.__class__.__name__) - - self.launch_context = launch_context - - is_valid = self.class_validation(launch_context) - if is_valid: - is_valid = self.validate() - - self.is_valid = is_valid - - @classmethod - def class_validation(cls, launch_context): - """Validation of class attributes by launch context. - - Args: - launch_context (ApplicationLaunchContext): Context of launching - application. - - Returns: - bool: Is launch hook valid for the context by class attributes. - """ - if cls.platforms: - low_platforms = tuple( - _platform.lower() - for _platform in cls.platforms - ) - if platform.system().lower() not in low_platforms: - return False - - if cls.hosts: - if launch_context.host_name not in cls.hosts: - return False - - if cls.app_groups: - if launch_context.app_group.name not in cls.app_groups: - return False - - if cls.app_names: - if launch_context.app_name not in cls.app_names: - return False - - if cls.launch_types: - if launch_context.launch_type not in cls.launch_types: - return False - - return True - - @property - def data(self): - return self.launch_context.data - - @property - def application(self): - return getattr(self.launch_context, "application", None) - - @property - def manager(self): - return getattr(self.application, "manager", None) - - @property - def host_name(self): - return getattr(self.application, "host_name", None) - - @property - def app_group(self): - return getattr(self.application, "group", None) - - @property - def app_name(self): - return getattr(self.application, "full_name", None) - - @property - def addons_manager(self): - return getattr(self.launch_context, "addons_manager", None) - - @property - def modules_manager(self): - """ - Deprecated: - Use 'addons_wrapper' instead. - """ - return self.addons_manager - - def validate(self): - """Optional validation of launch hook on initialization. - - Returns: - bool: Hook is valid (True) or invalid (False). - """ - # QUESTION Not sure if this method has any usable potential. - # - maybe result can be based on settings - return True - - @abstractmethod - def execute(self, *args, **kwargs): - """Abstract execute method where logic of hook is.""" - pass - - -class PreLaunchHook(LaunchHook): - """Abstract class of prelaunch hook. - - This launch hook will be processed before application is launched. - - If any exception will happen during processing the application won't be - launched. - """ - - -class PostLaunchHook(LaunchHook): - """Abstract class of postlaunch hook. - - This launch hook will be processed after application is launched. - - Nothing will happen if any exception will happen during processing. And - processing of other postlaunch hooks won't stop either. - """ - - -class ApplicationLaunchContext: - """Context of launching application. - - Main purpose of context is to prepare launch arguments and keyword - arguments for new process. Most important part of keyword arguments - preparations are environment variables. 
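# A minimal prelaunch hook sketch using the class attributes described above.
# The import assumes the hook classes are available from the applications
# addon, as other hooks in this change import them; the hook name, host and
# environment variable are hypothetical.
from ayon_applications import PreLaunchHook, LaunchTypes


class AddStudioDebugEnv(PreLaunchHook):
    # Runs after hooks with a lower order; hooks with 'order = None' run last.
    order = 100
    # Only applies to this host, on these platforms, for local launches.
    hosts = {"nuke"}
    platforms = {"windows", "linux", "darwin"}
    launch_types = {LaunchTypes.local}

    def execute(self):
        # Modify the environment that will be passed to the new process.
        self.launch_context.env["STUDIO_DEBUG"] = "1"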
- - During the whole process is possible to use `data` attribute to store - object usable in multiple places. - - Launch arguments are strings in list. It is possible to "chain" argument - when order of them matters. That is possible to do with adding list where - order is right and should not change. - NOTE: This is recommendation, not requirement. - e.g.: `["nuke.exe", "--NukeX"]` -> In this case any part of process may - insert argument between `nuke.exe` and `--NukeX`. To keep them together - it is better to wrap them in another list: `[["nuke.exe", "--NukeX"]]`. - - Notes: - It is possible to use launch context only to prepare environment - variables. In that case `executable` may be None and can be used - 'run_prelaunch_hooks' method to run prelaunch hooks which prepare - them. - - Args: - application (Application): Application definition. - executable (ApplicationExecutable): Object with path to executable. - env_group (Optional[str]): Environment variable group. If not set - 'DEFAULT_ENV_SUBGROUP' is used. - launch_type (Optional[str]): Launch type. If not set 'local' is used. - **data (dict): Any additional data. Data may be used during - preparation to store objects usable in multiple places. - """ - - def __init__( - self, - application, - executable, - env_group=None, - launch_type=None, - **data - ): - from ayon_core.addon import AddonsManager - - # Application object - self.application = application - - self.addons_manager = AddonsManager() - - # Logger - logger_name = "{}-{}".format(self.__class__.__name__, - self.application.full_name) - self.log = Logger.get_logger(logger_name) - - self.executable = executable - - if launch_type is None: - launch_type = LaunchTypes.local - self.launch_type = launch_type - - if env_group is None: - env_group = DEFAULT_ENV_SUBGROUP - - self.env_group = env_group - - self.data = dict(data) - - launch_args = [] - if executable is not None: - launch_args = executable.as_args() - # subprocess.Popen launch arguments (first argument in constructor) - self.launch_args = launch_args - self.launch_args.extend(application.arguments) - if self.data.get("app_args"): - self.launch_args.extend(self.data.pop("app_args")) - - # Handle launch environemtns - src_env = self.data.pop("env", None) - if src_env is not None and not isinstance(src_env, dict): - self.log.warning(( - "Passed `env` kwarg has invalid type: {}. Expected: `dict`." - " Using `os.environ` instead." - ).format(str(type(src_env)))) - src_env = None - - if src_env is None: - src_env = os.environ - - ignored_env = {"QT_API", } - env = { - key: str(value) - for key, value in src_env.items() - if key not in ignored_env - } - # subprocess.Popen keyword arguments - self.kwargs = {"env": env} - - if platform.system().lower() == "windows": - # Detach new process from currently running process on Windows - flags = ( - subprocess.CREATE_NEW_PROCESS_GROUP - | subprocess.DETACHED_PROCESS - ) - self.kwargs["creationflags"] = flags - - if not sys.stdout: - self.kwargs["stdout"] = subprocess.DEVNULL - self.kwargs["stderr"] = subprocess.DEVNULL - - self.prelaunch_hooks = None - self.postlaunch_hooks = None - - self.process = None - self._prelaunch_hooks_executed = False - - @property - def env(self): - if ( - "env" not in self.kwargs - or self.kwargs["env"] is None - ): - self.kwargs["env"] = {} - return self.kwargs["env"] - - @env.setter - def env(self, value): - if not isinstance(value, dict): - raise ValueError( - "'env' attribute expect 'dict' object. 
Got: {}".format( - str(type(value)) - ) - ) - self.kwargs["env"] = value - - @property - def modules_manager(self): - """ - Deprecated: - Use 'addons_manager' instead. - - """ - return self.addons_manager - - def _collect_addons_launch_hook_paths(self): - """Helper to collect application launch hooks from addons. - - Module have to have implemented 'get_launch_hook_paths' method which - can expect application as argument or nothing. - - Returns: - List[str]: Paths to launch hook directories. - """ - - expected_types = (list, tuple, set) - - output = [] - for module in self.addons_manager.get_enabled_addons(): - # Skip module if does not have implemented 'get_launch_hook_paths' - func = getattr(module, "get_launch_hook_paths", None) - if func is None: - continue - - func = module.get_launch_hook_paths - if hasattr(inspect, "signature"): - sig = inspect.signature(func) - expect_args = len(sig.parameters) > 0 - else: - expect_args = len(inspect.getargspec(func)[0]) > 0 - - # Pass application argument if method expect it. - try: - if expect_args: - hook_paths = func(self.application) - else: - hook_paths = func() - except Exception: - self.log.warning( - "Failed to call 'get_launch_hook_paths'", - exc_info=True - ) - continue - - if not hook_paths: - continue - - # Convert string to list - if isinstance(hook_paths, six.string_types): - hook_paths = [hook_paths] - - # Skip invalid types - if not isinstance(hook_paths, expected_types): - self.log.warning(( - "Result of `get_launch_hook_paths`" - " has invalid type {}. Expected {}" - ).format(type(hook_paths), expected_types)) - continue - - output.extend(hook_paths) - return output - - def paths_to_launch_hooks(self): - """Directory paths where to look for launch hooks.""" - # This method has potential to be part of application manager (maybe). 
- paths = [] - - # TODO load additional studio paths from settings - global_hooks_dir = os.path.join(AYON_CORE_ROOT, "hooks") - - hooks_dirs = [ - global_hooks_dir - ] - if self.host_name: - # If host requires launch hooks and is module then launch hooks - # should be collected using 'collect_launch_hook_paths' - # - module have to implement 'get_launch_hook_paths' - host_module = self.addons_manager.get_host_addon(self.host_name) - if not host_module: - hooks_dirs.append(os.path.join( - AYON_CORE_ROOT, "hosts", self.host_name, "hooks" - )) - - for path in hooks_dirs: - if ( - os.path.exists(path) - and os.path.isdir(path) - and path not in paths - ): - paths.append(path) - - # Load modules paths - paths.extend(self._collect_addons_launch_hook_paths()) - - return paths - - def discover_launch_hooks(self, force=False): - """Load and prepare launch hooks.""" - if ( - self.prelaunch_hooks is not None - or self.postlaunch_hooks is not None - ): - if not force: - self.log.info("Launch hooks were already discovered.") - return - - self.prelaunch_hooks.clear() - self.postlaunch_hooks.clear() - - self.log.debug("Discovery of launch hooks started.") - - paths = self.paths_to_launch_hooks() - self.log.debug("Paths searched for launch hooks:\n{}".format( - "\n".join("- {}".format(path) for path in paths) - )) - - all_classes = { - "pre": [], - "post": [] - } - for path in paths: - if not os.path.exists(path): - self.log.info( - "Path to launch hooks does not exist: \"{}\"".format(path) - ) - continue - - modules, _crashed = modules_from_path(path) - for _filepath, module in modules: - all_classes["pre"].extend( - classes_from_module(PreLaunchHook, module) - ) - all_classes["post"].extend( - classes_from_module(PostLaunchHook, module) - ) - - for launch_type, classes in all_classes.items(): - hooks_with_order = [] - hooks_without_order = [] - for klass in classes: - try: - hook = klass(self) - if not hook.is_valid: - self.log.debug( - "Skipped hook invalid for current launch context: " - "{}".format(klass.__name__) - ) - continue - - if inspect.isabstract(hook): - self.log.debug("Skipped abstract hook: {}".format( - klass.__name__ - )) - continue - - # Separate hooks by pre/post class - if hook.order is None: - hooks_without_order.append(hook) - else: - hooks_with_order.append(hook) - - except Exception: - self.log.warning( - "Initialization of hook failed: " - "{}".format(klass.__name__), - exc_info=True - ) - - # Sort hooks with order by order - ordered_hooks = list(sorted( - hooks_with_order, key=lambda obj: obj.order - )) - # Extend ordered hooks with hooks without defined order - ordered_hooks.extend(hooks_without_order) - - if launch_type == "pre": - self.prelaunch_hooks = ordered_hooks - else: - self.postlaunch_hooks = ordered_hooks - - self.log.debug("Found {} prelaunch and {} postlaunch hooks.".format( - len(self.prelaunch_hooks), len(self.postlaunch_hooks) - )) - - @property - def app_name(self): - return self.application.name - - @property - def host_name(self): - return self.application.host_name - - @property - def app_group(self): - return self.application.group - - @property - def manager(self): - return self.application.manager - - def _run_process(self): - # Windows and MacOS have easier process start - low_platform = platform.system().lower() - if low_platform in ("windows", "darwin"): - return subprocess.Popen(self.launch_args, **self.kwargs) - - # Linux uses mid process - # - it is possible that the mid process executable is not - # available for this version of AYON in that case 
use standard - # launch - launch_args = get_linux_launcher_args() - if launch_args is None: - return subprocess.Popen(self.launch_args, **self.kwargs) - - # Prepare data that will be passed to midprocess - # - store arguments to a json and pass path to json as last argument - # - pass environments to set - app_env = self.kwargs.pop("env", {}) - json_data = { - "args": self.launch_args, - "env": app_env - } - if app_env: - # Filter environments of subprocess - self.kwargs["env"] = { - key: value - for key, value in os.environ.items() - if key in app_env - } - - # Create temp file - json_temp = tempfile.NamedTemporaryFile( - mode="w", prefix="op_app_args", suffix=".json", delete=False - ) - json_temp.close() - json_temp_filpath = json_temp.name - with open(json_temp_filpath, "w") as stream: - json.dump(json_data, stream) - - launch_args.append(json_temp_filpath) - - # Create mid-process which will launch application - process = subprocess.Popen(launch_args, **self.kwargs) - # Wait until the process finishes - # - This is important! The process would stay in "open" state. - process.wait() - # Remove the temp file - os.remove(json_temp_filpath) - # Return process which is already terminated - return process - - def run_prelaunch_hooks(self): - """Run prelaunch hooks. - - This method will be executed only once, any future calls will skip - the processing. - """ - - if self._prelaunch_hooks_executed: - self.log.warning("Prelaunch hooks were already executed.") - return - # Discover launch hooks - self.discover_launch_hooks() - - # Execute prelaunch hooks - for prelaunch_hook in self.prelaunch_hooks: - self.log.debug("Executing prelaunch hook: {}".format( - str(prelaunch_hook.__class__.__name__) - )) - prelaunch_hook.execute() - self._prelaunch_hooks_executed = True - - def launch(self): - """Collect data for new process and then create it. - - This method must not be executed more than once. - - Returns: - subprocess.Popen: Created process as Popen object. - """ - if self.process is not None: - self.log.warning("Application was already launched.") - return - - if not self._prelaunch_hooks_executed: - self.run_prelaunch_hooks() - - self.log.debug("All prelaunch hook executed. Starting new process.") - - # Prepare subprocess args - args_len_str = "" - if isinstance(self.launch_args, str): - args = self.launch_args - else: - args = self.clear_launch_args(self.launch_args) - args_len_str = " ({})".format(len(args)) - self.log.info( - "Launching \"{}\" with args{}: {}".format( - self.application.full_name, args_len_str, args - ) - ) - self.launch_args = args - - # Run process - self.process = self._run_process() - - # Process post launch hooks - for postlaunch_hook in self.postlaunch_hooks: - self.log.debug("Executing postlaunch hook: {}".format( - str(postlaunch_hook.__class__.__name__) - )) - - # TODO how to handle errors? - # - store to variable to let them accessible? - try: - postlaunch_hook.execute() - - except Exception: - self.log.warning( - "After launch procedures were not successful.", - exc_info=True - ) - - self.log.debug("Launch of {} finished.".format( - self.application.full_name - )) - - return self.process - - @staticmethod - def clear_launch_args(args): - """Collect launch arguments to final order. - - Launch argument should be list that may contain another lists this - function will upack inner lists and keep ordering. 
- - ``` - # source - [ [ arg1, [ arg2, arg3 ] ], arg4, [arg5, arg6]] - # result - [ arg1, arg2, arg3, arg4, arg5, arg6] - - Args: - args (list): Source arguments in list may contain inner lists. - - Return: - list: Unpacked arguments. - """ - if isinstance(args, str): - return args - all_cleared = False - while not all_cleared: - all_cleared = True - new_args = [] - for arg in args: - if isinstance(arg, (list, tuple, set)): - all_cleared = False - for _arg in arg: - new_args.append(_arg) - else: - new_args.append(arg) - args = new_args - - return args - - -class MissingRequiredKey(KeyError): - pass - - -class EnvironmentPrepData(dict): - """Helper dictionary for storin temp data during environment prep. - - Args: - data (dict): Data must contain required keys. - """ - required_keys = ( - "project_entity", "folder_entity", "task_entity", "app", "anatomy" - ) - - def __init__(self, data): - for key in self.required_keys: - if key not in data: - raise MissingRequiredKey(key) - - if not data.get("log"): - data["log"] = get_logger() - - if data.get("env") is None: - data["env"] = os.environ.copy() - - project_name = data["project_entity"]["name"] - if "project_settings" not in data: - data["project_settings"] = get_project_settings(project_name) - - super(EnvironmentPrepData, self).__init__(data) - - -def get_app_environments_for_context( - project_name, - folder_path, - task_name, - app_name, - env_group=None, - launch_type=None, - env=None, - addons_manager=None -): - """Prepare environment variables by context. - Args: - project_name (str): Name of project. - folder_path (str): Folder path. - task_name (str): Name of task. - app_name (str): Name of application that is launched and can be found - by ApplicationManager. - env_group (Optional[str]): Name of environment group. If not passed - default group is used. - launch_type (Optional[str]): Type for which prelaunch hooks are - executed. - env (Optional[dict[str, str]]): Initial environment variables. - `os.environ` is used when not passed. - addons_manager (Optional[AddonsManager]): Initialized modules - manager. - - Returns: - dict: Environments for passed context and application. 
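# A standalone sketch of the flattening rule of 'clear_launch_args' above,
# with a hypothetical Nuke command line: wrapping the executable and its
# flag in an inner list keeps them adjacent in the final argument order.
def flatten_args(args):
    flat = []
    for arg in args:
        if isinstance(arg, (list, tuple, set)):
            flat.extend(flatten_args(arg))
        else:
            flat.append(arg)
    return flat


nested = [["/usr/local/Nuke14.0v5/Nuke14.0", "--nukex"], "--script", "shot.nk"]
print(flatten_args(nested))
# ['/usr/local/Nuke14.0v5/Nuke14.0', '--nukex', '--script', 'shot.nk']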
- """ - - # Prepare app object which can be obtained only from ApplicationManager - app_manager = ApplicationManager() - context = app_manager.create_launch_context( - app_name, - project_name=project_name, - folder_path=folder_path, - task_name=task_name, - env_group=env_group, - launch_type=launch_type, - env=env, - addons_manager=addons_manager, - modules_manager=addons_manager, - ) - context.run_prelaunch_hooks() - return context.env - - -def _merge_env(env, current_env): - """Modified function(merge) from acre module.""" - import acre - - result = current_env.copy() - for key, value in env.items(): - # Keep missing keys by not filling `missing` kwarg - value = acre.lib.partial_format(value, data=current_env) - result[key] = value - return result - - -def _add_python_version_paths(app, env, logger, addons_manager): - """Add vendor packages specific for a Python version.""" - - for addon in addons_manager.get_enabled_addons(): - addon.modify_application_launch_arguments(app, env) - - # Skip adding if host name is not set - if not app.host_name: - return - - # Add Python 2/3 modules - python_vendor_dir = os.path.join( - AYON_CORE_ROOT, - "vendor", - "python" - ) - if app.use_python_2: - pythonpath = os.path.join(python_vendor_dir, "python_2") - else: - pythonpath = os.path.join(python_vendor_dir, "python_3") - - if not os.path.exists(pythonpath): - return - - logger.debug("Adding Python version specific paths to PYTHONPATH") - python_paths = [pythonpath] - - # Load PYTHONPATH from current launch context - python_path = env.get("PYTHONPATH") - if python_path: - python_paths.append(python_path) - - # Set new PYTHONPATH to launch context environments - env["PYTHONPATH"] = os.pathsep.join(python_paths) - - -def prepare_app_environments( - data, env_group=None, implementation_envs=True, addons_manager=None -): - """Modify launch environments based on launched app and context. - - Args: - data (EnvironmentPrepData): Dictionary where result and intermediate - result will be stored. - """ - import acre - - app = data["app"] - log = data["log"] - source_env = data["env"].copy() - - if addons_manager is None: - from ayon_core.addon import AddonsManager - - addons_manager = AddonsManager() - - _add_python_version_paths(app, source_env, log, addons_manager) - - # Use environments from local settings - filtered_local_envs = {} - # NOTE Overrides for environment variables are not implemented in AYON. 
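# A small sketch of the PYTHONPATH handling above: the version specific
# vendor directory is put first and any existing value is appended after it,
# joined with the platform path separator. Paths are hypothetical.
import os

env = {"PYTHONPATH": "/studio/shared/python"}
vendor_dir = "/ayon/vendor/python/python_3"

python_paths = [vendor_dir]
existing = env.get("PYTHONPATH")
if existing:
    python_paths.append(existing)
env["PYTHONPATH"] = os.pathsep.join(python_paths)
print(env["PYTHONPATH"])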
- # project_settings = data["project_settings"] - # whitelist_envs = project_settings["general"].get("local_env_white_list") - # if whitelist_envs: - # local_settings = get_local_settings() - # local_envs = local_settings.get("environments") or {} - # filtered_local_envs = { - # key: value - # for key, value in local_envs.items() - # if key in whitelist_envs - # } - - # Apply local environment variables for already existing values - for key, value in filtered_local_envs.items(): - if key in source_env: - source_env[key] = value - - # `app_and_tool_labels` has debug purpose - app_and_tool_labels = [app.full_name] - # Environments for application - environments = [ - app.group.environment, - app.environment - ] - - folder_entity = data.get("folder_entity") - # Add tools environments - groups_by_name = {} - tool_by_group_name = collections.defaultdict(dict) - if folder_entity: - # Make sure each tool group can be added only once - for key in folder_entity["attrib"].get("tools") or []: - tool = app.manager.tools.get(key) - if not tool or not tool.is_valid_for_app(app): - continue - groups_by_name[tool.group.name] = tool.group - tool_by_group_name[tool.group.name][tool.name] = tool - - for group_name in sorted(groups_by_name.keys()): - group = groups_by_name[group_name] - environments.append(group.environment) - for tool_name in sorted(tool_by_group_name[group_name].keys()): - tool = tool_by_group_name[group_name][tool_name] - environments.append(tool.environment) - app_and_tool_labels.append(tool.full_name) - - log.debug( - "Will add environments for apps and tools: {}".format( - ", ".join(app_and_tool_labels) - ) - ) - - env_values = {} - for _env_values in environments: - if not _env_values: - continue - - # Choose right platform - tool_env = parse_environments(_env_values, env_group) - - # Apply local environment variables - # - must happen between all values because they may be used during - # merge - for key, value in filtered_local_envs.items(): - if key in tool_env: - tool_env[key] = value - - # Merge dictionaries - env_values = _merge_env(tool_env, env_values) - - merged_env = _merge_env(env_values, source_env) - - loaded_env = acre.compute(merged_env, cleanup=False) - - final_env = None - # Add host specific environments - if app.host_name and implementation_envs: - host_addon = addons_manager.get_host_addon(app.host_name) - if not host_addon: - module = __import__("ayon_core.hosts", fromlist=[app.host_name]) - host_module = getattr(module, app.host_name, None) - add_implementation_envs = None - if host_addon: - add_implementation_envs = getattr( - host_addon, "add_implementation_envs", None - ) - if add_implementation_envs: - # Function may only modify passed dict without returning value - final_env = add_implementation_envs(loaded_env, app) - - if final_env is None: - final_env = loaded_env - - keys_to_remove = set(source_env.keys()) - set(final_env.keys()) - - # Update env - data["env"].update(final_env) - for key in keys_to_remove: - data["env"].pop(key, None) - - -def apply_project_environments_value( - project_name, env, project_settings=None, env_group=None -): - """Apply project specific environments on passed environments. - - The environments are applied on passed `env` argument value so it is not - required to apply changes back. - - Args: - project_name (str): Name of project for which environments should be - received. - env (dict): Environment values on which project specific environments - will be applied. 
- project_settings (dict): Project settings for passed project name. - Optional if project settings are already prepared. - - Returns: - dict: Passed env values with applied project environments. - - Raises: - KeyError: If project settings do not contain keys for project specific - environments. - """ - import acre - - if project_settings is None: - project_settings = get_project_settings(project_name) - - env_value = project_settings["core"]["project_environments"] - if env_value: - env_value = json.loads(env_value) - parsed_value = parse_environments(env_value, env_group) - env.update(acre.compute( - _merge_env(parsed_value, env), - cleanup=False - )) - return env - - -def prepare_context_environments(data, env_group=None, addons_manager=None): - """Modify launch environments with context data for launched host. - - Args: - data (EnvironmentPrepData): Dictionary where result and intermediate - result will be stored. - """ - - from ayon_core.pipeline.template_data import get_template_data - - # Context environments - log = data["log"] - - project_entity = data["project_entity"] - folder_entity = data["folder_entity"] - task_entity = data["task_entity"] - if not project_entity: - log.info( - "Skipping context environments preparation." - " Launch context does not contain required data." - ) - return - - # Load project specific environments - project_name = project_entity["name"] - project_settings = get_project_settings(project_name) - data["project_settings"] = project_settings - - app = data["app"] - context_env = { - "AYON_PROJECT_NAME": project_entity["name"], - "AYON_APP_NAME": app.full_name - } - if folder_entity: - folder_path = folder_entity["path"] - context_env["AYON_FOLDER_PATH"] = folder_path - - if task_entity: - context_env["AYON_TASK_NAME"] = task_entity["name"] - - log.debug( - "Context environments set:\n{}".format( - json.dumps(context_env, indent=4) - ) - ) - data["env"].update(context_env) - - # Apply project specific environments on current env value - # - apply them once the context environments are set - apply_project_environments_value( - project_name, data["env"], project_settings, env_group - ) - - if not app.is_host: - return - - data["env"]["AYON_HOST_NAME"] = app.host_name - - if not folder_entity or not task_entity: - # QUESTION replace with log.info and skip workfile discovery? - # - technically it should be possible to launch host without context - raise ApplicationLaunchFailed( - "Host launch require folder and task context." 
- ) - - workdir_data = get_template_data( - project_entity, - folder_entity, - task_entity, - app.host_name, - project_settings - ) - data["workdir_data"] = workdir_data - - anatomy = data["anatomy"] - - task_type = workdir_data["task"]["type"] - # Temp solution how to pass task type to `_prepare_last_workfile` - data["task_type"] = task_type - - try: - from ayon_core.pipeline.workfile import get_workdir_with_workdir_data - - workdir = get_workdir_with_workdir_data( - workdir_data, - anatomy.project_name, - anatomy, - project_settings=project_settings - ) - - except Exception as exc: - raise ApplicationLaunchFailed( - "Error in anatomy.format: {}".format(str(exc)) - ) - - if not os.path.exists(workdir): - log.debug( - "Creating workdir folder: \"{}\"".format(workdir) - ) - try: - os.makedirs(workdir) - except Exception as exc: - raise ApplicationLaunchFailed( - "Couldn't create workdir because: {}".format(str(exc)) - ) - - data["env"]["AYON_WORKDIR"] = workdir - - _prepare_last_workfile(data, workdir, addons_manager) - - -def _prepare_last_workfile(data, workdir, addons_manager): - """last workfile workflow preparation. - - Function check if should care about last workfile workflow and tries - to find the last workfile. Both information are stored to `data` and - environments. - - Last workfile is filled always (with version 1) even if any workfile - exists yet. - - Args: - data (EnvironmentPrepData): Dictionary where result and intermediate - result will be stored. - workdir (str): Path to folder where workfiles should be stored. - """ - - from ayon_core.addon import AddonsManager - from ayon_core.pipeline import HOST_WORKFILE_EXTENSIONS - from ayon_core.pipeline.workfile import ( - should_use_last_workfile_on_launch, - should_open_workfiles_tool_on_launch, - ) - - if not addons_manager: - addons_manager = AddonsManager() - - log = data["log"] - - _workdir_data = data.get("workdir_data") - if not _workdir_data: - log.info( - "Skipping last workfile preparation." - " Key `workdir_data` not filled." 
- ) - return - - app = data["app"] - workdir_data = copy.deepcopy(_workdir_data) - project_name = data["project_name"] - task_name = data["task_name"] - task_type = data["task_type"] - - start_last_workfile = data.get("start_last_workfile") - if start_last_workfile is None: - start_last_workfile = should_use_last_workfile_on_launch( - project_name, app.host_name, task_name, task_type - ) - else: - log.info("Opening of last workfile was disabled by user") - - data["start_last_workfile"] = start_last_workfile - - workfile_startup = should_open_workfiles_tool_on_launch( - project_name, app.host_name, task_name, task_type - ) - data["workfile_startup"] = workfile_startup - - # Store boolean as "0"(False) or "1"(True) - data["env"]["AVALON_OPEN_LAST_WORKFILE"] = ( - str(int(bool(start_last_workfile))) - ) - data["env"]["AYON_WORKFILE_TOOL_ON_START"] = ( - str(int(bool(workfile_startup))) - ) - - _sub_msg = "" if start_last_workfile else " not" - log.debug( - "Last workfile should{} be opened on start.".format(_sub_msg) - ) - - # Last workfile path - last_workfile_path = data.get("last_workfile_path") or "" - if not last_workfile_path: - host_addon = addons_manager.get_host_addon(app.host_name) - if host_addon: - extensions = host_addon.get_workfile_extensions() - else: - extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) - - if extensions: - from ayon_core.pipeline.workfile import ( - get_workfile_template_key, - get_last_workfile - ) - - anatomy = data["anatomy"] - project_settings = data["project_settings"] - task_type = workdir_data["task"]["type"] - template_key = get_workfile_template_key( - project_name, - task_type, - app.host_name, - project_settings=project_settings - ) - # Find last workfile - file_template = anatomy.get_template_item( - "work", template_key, "file" - ).template - - workdir_data.update({ - "version": 1, - "user": get_ayon_username(), - "ext": extensions[0] - }) - - last_workfile_path = get_last_workfile( - workdir, file_template, workdir_data, extensions, True - ) - - if os.path.exists(last_workfile_path): - log.debug(( - "Workfiles for launch context does not exists" - " yet but path will be set." - )) - log.debug( - "Setting last workfile path: {}".format(last_workfile_path) - ) - - data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path - data["last_workfile_path"] = last_workfile_path diff --git a/client/ayon_core/lib/plugin_tools.py b/client/ayon_core/lib/plugin_tools.py index 5ad4da88b9..654bc7ac4a 100644 --- a/client/ayon_core/lib/plugin_tools.py +++ b/client/ayon_core/lib/plugin_tools.py @@ -94,8 +94,12 @@ def prepare_template_data(fill_pairs): output = {} for item in valid_items: keys, value = item - upper_value = value.upper() - capitalized_value = _capitalize_value(value) + # Convert only string values + if isinstance(value, str): + upper_value = value.upper() + capitalized_value = _capitalize_value(value) + else: + upper_value = capitalized_value = value first_key = keys.pop(0) if not keys: diff --git a/client/ayon_core/lib/terminal.py b/client/ayon_core/lib/terminal.py index a22f2358aa..10fcc79a27 100644 --- a/client/ayon_core/lib/terminal.py +++ b/client/ayon_core/lib/terminal.py @@ -1,15 +1,5 @@ # -*- coding: utf-8 -*- """Package helping with colorizing and formatting terminal output.""" -# :: -# //. ... .. ///. //. -# ///\\\ \\\ \\ ///\\\ /// -# /// \\ \\\ \\ /// \\ /// // -# \\\ // \\\ // \\\ // \\\// ./ -# \\\// \\\// \\\// \\\' // -# \\\ \\\ \\\ \\\// -# ''' ''' ''' ''' -# ..---===[[ PyP3 Setup ]]===---... 
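# A sketch of the behaviour added in 'prepare_template_data' above: only
# string values get uppercase/capitalized variants, other value types are
# passed through unchanged. The sample data is hypothetical and
# 'str.capitalize' stands in for the private helper used in the real code.
fill_pairs = {"asset": "hero", "version": 3}
for key, value in fill_pairs.items():
    if isinstance(value, str):
        upper_value = value.upper()
        capitalized_value = value.capitalize()
    else:
        upper_value = capitalized_value = value
    print(key, value, upper_value, capitalized_value)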
-# import re import time import threading diff --git a/client/ayon_core/lib/transcoding.py b/client/ayon_core/lib/transcoding.py index 08e0bc9237..4d778c2091 100644 --- a/client/ayon_core/lib/transcoding.py +++ b/client/ayon_core/lib/transcoding.py @@ -45,7 +45,7 @@ ARRAY_TYPE_REGEX = re.compile(r"^(int|float|string)\[\d+\]$") IMAGE_EXTENSIONS = { ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", - ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", + ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dng", ".dpx", ".ecw", ".exr", ".fits", ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer", ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2", ".jng", ".jpeg", ".jpeg-ls", ".jpeg-hdr", ".2000", ".jpg", diff --git a/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py b/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py index 61c5eac2f5..8381c7d73e 100644 --- a/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py +++ b/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py @@ -11,19 +11,17 @@ class ClockifyStart(LauncherAction): order = 500 clockify_api = ClockifyAPI() - def is_compatible(self, session): + def is_compatible(self, selection): """Return whether the action is compatible with the session""" - if "AYON_TASK_NAME" in session: - return True - return False + return selection.is_task_selected - def process(self, session, **kwargs): + def process(self, selection, **kwargs): self.clockify_api.set_api() user_id = self.clockify_api.user_id workspace_id = self.clockify_api.workspace_id - project_name = session["AYON_PROJECT_NAME"] - folder_path = session["AYON_FOLDER_PATH"] - task_name = session["AYON_TASK_NAME"] + project_name = selection.project_name + folder_path = selection.folder_path + task_name = selection.task_name description = "/".join([folder_path.lstrip("/"), task_name]) # fetch folder entity diff --git a/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py b/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py index 72187c6d28..5388f47c98 100644 --- a/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py +++ b/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py @@ -19,15 +19,18 @@ class ClockifySync(LauncherAction): order = 500 clockify_api = ClockifyAPI() - def is_compatible(self, session): + def is_compatible(self, selection): """Check if there's some projects to sync""" + if selection.is_project_selected: + return True + try: next(ayon_api.get_projects()) return True except StopIteration: return False - def process(self, session, **kwargs): + def process(self, selection, **kwargs): self.clockify_api.set_api() workspace_id = self.clockify_api.workspace_id user_id = self.clockify_api.user_id @@ -37,10 +40,9 @@ class ClockifySync(LauncherAction): raise ClockifyPermissionsCheckFailed( "Current CLockify user is missing permissions for this action!" 
) - project_name = session.get("AYON_PROJECT_NAME") or "" - if project_name.strip(): - projects_to_sync = [ayon_api.get_project(project_name)] + if selection.is_project_selected: + projects_to_sync = [selection.project_entity] else: projects_to_sync = ayon_api.get_projects() diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index a284464009..675346105c 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -80,6 +80,8 @@ class AfterEffectsSubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py index ae19e63a37..ab342c1a9d 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py @@ -102,6 +102,8 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py index cf124c0bcc..e3a4cd8030 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py @@ -103,17 +103,17 @@ class FusionSubmitDeadline( # Collect all saver instances in context that are to be rendered saver_instances = [] - for instance in context: - if instance.data["productType"] != "render": + for inst in context: + if inst.data["productType"] != "render": # Allow only saver family instances continue - if not instance.data.get("publish", True): + if not inst.data.get("publish", True): # Skip inactive instances continue - self.log.debug(instance.data["name"]) - saver_instances.append(instance) + self.log.debug(inst.data["name"]) + saver_instances.append(inst) if not saver_instances: raise RuntimeError("No instances found for Deadline submission") @@ -225,6 +225,8 @@ class FusionSubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py index beb8afc3a3..d52b16b27d 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -273,6 +273,8 @@ class HarmonySubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py index 
1abefa515a..cba05f6948 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py @@ -106,12 +106,14 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", "AYON_WORKDIR", "AYON_APP_NAME", - "IS_TEST" + "IS_TEST", ] environment = { diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py index 5602b02707..0300b12104 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -207,6 +207,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py index ac01af901c..d70cb75bf3 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -376,6 +376,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, keys = [ "PYTHONPATH", "PATH", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", @@ -388,7 +390,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, "TOOL_ENV", "FOUNDRY_LICENSE", "OPENPYPE_SG_USER", - "AYON_BUNDLE_NAME", ] # add allowed keys from preset if any diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py index 50bd414587..4e4657d886 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py @@ -67,7 +67,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, "FTRACK_SERVER", "AYON_APP_NAME", "AYON_USERNAME", - "OPENPYPE_SG_USER", + "AYON_SG_USERNAME", "KITSU_LOGIN", "KITSU_PWD" ] @@ -133,6 +133,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, "AYON_RENDER_JOB": "0", "AYON_REMOTE_PUBLISH": "0", "AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"], + "AYON_DEFAULT_SETTINGS_VARIANT": ( + os.environ["AYON_DEFAULT_SETTINGS_VARIANT"] + ), } # add environments from self.environ_keys diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py index 8fd6a5024c..f3b68b4258 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py @@ -130,7 +130,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, "FTRACK_SERVER", "AYON_APP_NAME", "AYON_USERNAME", - "OPENPYPE_SG_USER", + "AYON_SG_USERNAME", "KITSU_LOGIN", "KITSU_PWD" ] @@ -210,6 +210,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, "AYON_RENDER_JOB": "0", "AYON_REMOTE_PUBLISH": "0", "AYON_BUNDLE_NAME": 
os.environ["AYON_BUNDLE_NAME"], + "AYON_DEFAULT_SETTINGS_VARIANT": ( + os.environ["AYON_DEFAULT_SETTINGS_VARIANT"] + ), } # add environments from self.environ_keys diff --git a/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 8df96b425e..ac04407f5b 100644 --- a/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -13,7 +13,7 @@ from Deadline.Scripting import ( FileUtils, DirectoryUtils, ) -__version__ = "1.0.1" +__version__ = "1.1.0" VERSION_REGEX = re.compile( r"(?P0|[1-9]\d*)" r"\.(?P0|[1-9]\d*)" @@ -463,19 +463,13 @@ def inject_ayon_environment(deadlinePlugin): export_url = os.path.join(tempfile.gettempdir(), temp_file_name) print(">>> Temporary path: {}".format(export_url)) - args = [ - "--headless", - "extractenvironments", - export_url - ] - add_kwargs = { "envgroup": "farm", } # Support backwards compatible keys for key, env_keys in ( ("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]), - ("asset", ["AYON_FOLDER_PATH", "AVALON_ASSET"]), + ("folder", ["AYON_FOLDER_PATH", "AVALON_ASSET"]), ("task", ["AYON_TASK_NAME", "AVALON_TASK"]), ("app", ["AYON_APP_NAME", "AVALON_APP_NAME"]), ): @@ -486,18 +480,37 @@ def inject_ayon_environment(deadlinePlugin): break add_kwargs[key] = value - if job.GetJobEnvironmentKeyValue("IS_TEST"): - args.append("--automatic-tests") - - if all(add_kwargs.values()): - for key, value in add_kwargs.items(): - args.extend(["--{}".format(key), value]) - else: + if not all(add_kwargs.values()): raise RuntimeError(( "Missing required env vars: AYON_PROJECT_NAME," " AYON_FOLDER_PATH, AYON_TASK_NAME, AYON_APP_NAME" )) + # Use applications addon arguments + # TODO validate if applications addon should be used + args = [ + "--headless", + "addon", + "applications", + "extractenvironments", + export_url + ] + # Backwards compatibility for older versions + legacy_args = [ + "--headless", + "extractenvironments", + export_url + ] + if job.GetJobEnvironmentKeyValue("IS_TEST"): + args.append("--automatic-tests") + + for key, value in add_kwargs.items(): + args.extend(["--{}".format(key), value]) + # Legacy arguments expect '--asset' instead of '--folder' + if key == "folder": + key = "asset" + legacy_args.extend(["--{}".format(key), value]) + environment = { "AYON_SERVER_URL": ayon_server_url, "AYON_API_KEY": ayon_api_key, @@ -516,9 +529,18 @@ def inject_ayon_environment(deadlinePlugin): ) if process_exitcode != 0: - raise RuntimeError( - "Failed to run Ayon process to extract environments." + print( + "Failed to run AYON process to extract environments. Trying" + " to use legacy arguments." ) + legacy_args_str = subprocess.list2cmdline(legacy_args) + process_exitcode = deadlinePlugin.RunProcess( + exe, legacy_args_str, os.path.dirname(exe), -1 + ) + if process_exitcode != 0: + raise RuntimeError( + "Failed to run AYON process to extract environments." 
+ ) print(">>> Loading file ...") with open(export_url) as fp: diff --git a/client/ayon_core/modules/job_queue/addon.py b/client/ayon_core/modules/job_queue/addon.py index 32d06d0040..0fa54eb2f0 100644 --- a/client/ayon_core/modules/job_queue/addon.py +++ b/client/ayon_core/modules/job_queue/addon.py @@ -168,7 +168,7 @@ class JobQueueAddon(AYONAddon): @classmethod def start_worker(cls, app_name, server_url=None): import requests - from ayon_core.lib import ApplicationManager + from ayon_applications import ApplicationManager if not server_url: server_url = cls.get_server_url_from_settings() diff --git a/client/ayon_core/modules/loader_action.py b/client/ayon_core/modules/loader_action.py index a0cc417b66..1e45db05dc 100644 --- a/client/ayon_core/modules/loader_action.py +++ b/client/ayon_core/modules/loader_action.py @@ -13,7 +13,7 @@ class LoaderAddon(AYONAddon, ITrayAddon): # Add library tool self._loader_imported = False try: - from ayon_core.tools.loader.ui import LoaderWindow + from ayon_core.tools.loader.ui import LoaderWindow # noqa F401 self._loader_imported = True except Exception: diff --git a/client/ayon_core/modules/royalrender/api.py b/client/ayon_core/modules/royalrender/api.py index cd72014a42..a69f88c43c 100644 --- a/client/ayon_core/modules/royalrender/api.py +++ b/client/ayon_core/modules/royalrender/api.py @@ -1,13 +1,14 @@ # -*- coding: utf-8 -*- """Wrapper around Royal Render API.""" -import sys import os +import sys -from ayon_core.lib.local_settings import AYONSettingsRegistry -from ayon_core.lib import Logger, run_subprocess -from .rr_job import RRJob, SubmitFile, SubmitterParameter +from ayon_core.lib import Logger, run_subprocess, AYONSettingsRegistry from ayon_core.lib.vendor_bin_utils import find_tool_in_custom_paths +from .rr_job import SubmitFile +from .rr_job import RRjob, SubmitterParameter # noqa F401 + class Api: diff --git a/client/ayon_core/modules/royalrender/lib.py b/client/ayon_core/modules/royalrender/lib.py index 5392803710..82bc96e759 100644 --- a/client/ayon_core/modules/royalrender/lib.py +++ b/client/ayon_core/modules/royalrender/lib.py @@ -308,31 +308,45 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin, export_url = os.path.join(tempfile.gettempdir(), temp_file_name) print(">>> Temporary path: {}".format(export_url)) - args = [ - "--headless", - "extractenvironments", - export_url - ] - anatomy_data = instance.context.data["anatomyData"] + addons_manager = instance.context.data["ayonAddonsManager"] + applications_addon = addons_manager.get_enabled_addon("applications") + + folder_key = "folder" + if applications_addon is None: + # Use 'asset' when applications addon command is not used + folder_key = "asset" add_kwargs = { "project": anatomy_data["project"]["name"], - "asset": instance.context.data["folderPath"], + folder_key: instance.context.data["folderPath"], "task": anatomy_data["task"]["name"], "app": instance.context.data.get("appName"), "envgroup": "farm" } - if os.getenv('IS_TEST'): - args.append("--automatic-tests") - if not all(add_kwargs.values()): raise RuntimeError(( "Missing required env vars: AYON_PROJECT_NAME, AYON_FOLDER_PATH," " AYON_TASK_NAME, AYON_APP_NAME" )) + args = ["--headless"] + # Use applications addon to extract environments + # NOTE this is for backwards compatibility, the global command + # will be removed in future and only applications addon command + # should be used. 
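# A sketch of the two command lines built around here: the new form routes
# through the applications addon and uses '--folder', while the legacy
# fallback keeps the global 'extractenvironments' entry point and '--asset'.
# The export path and context values are hypothetical.
export_url = "/tmp/ayon_environments.json"
context = {
    "project": "my_project",
    "folder": "/assets/hero",
    "task": "compositing",
    "app": "nuke/14-0",
    "envgroup": "farm",
}

new_args = ["--headless", "addon", "applications", "extractenvironments", export_url]
legacy_args = ["--headless", "extractenvironments", export_url]
for key, value in context.items():
    new_args.extend(["--{}".format(key), value])
    legacy_key = "asset" if key == "folder" else key
    legacy_args.extend(["--{}".format(legacy_key), value])

print(new_args)
print(legacy_args)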
+ if applications_addon is not None: + args.extend(["addon", "applications"]) + + args.extend([ + "extractenvironments", + export_url + ]) + + if os.getenv('IS_TEST'): + args.append("--automatic-tests") + for key, value in add_kwargs.items(): args.extend([f"--{key}", value]) self.log.debug("Executing: {}".format(" ".join(args))) diff --git a/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py b/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py index 662913cadf..51500f84f5 100644 --- a/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py +++ b/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py @@ -3,7 +3,6 @@ import os import attr import json -import re import pyblish.api @@ -65,7 +64,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin, "FTRACK_SERVER", "AYON_APP_NAME", "AYON_USERNAME", - "OPENPYPE_SG_USER", + "AYON_SG_USERNAME", ] priority = 50 diff --git a/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py b/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py index da5d430939..b402d4034a 100644 --- a/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py +++ b/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PostLaunchHook, LaunchTypes +from ayon_applications import PostLaunchHook, LaunchTypes class PostStartTimerHook(PostLaunchHook): diff --git a/client/ayon_core/pipeline/actions.py b/client/ayon_core/pipeline/actions.py index 8e0ce7e583..eae2fc94b5 100644 --- a/client/ayon_core/pipeline/actions.py +++ b/client/ayon_core/pipeline/actions.py @@ -1,4 +1,8 @@ import logging +import warnings + +import ayon_api + from ayon_core.pipeline.plugin_discover import ( discover, register_plugin, @@ -10,6 +14,288 @@ from ayon_core.pipeline.plugin_discover import ( from .load.utils import get_representation_path_from_context +class LauncherActionSelection: + """Object helper to pass selection to actions. + + Object support backwards compatibility for 'session' from OpenPype where + environment variable keys were used to define selection. + + Args: + project_name (str): Selected project name. + folder_id (str): Selected folder id. + task_id (str): Selected task id. + folder_path (Optional[str]): Selected folder path. + task_name (Optional[str]): Selected task name. + project_entity (Optional[dict[str, Any]]): Project entity. + folder_entity (Optional[dict[str, Any]]): Folder entity. + task_entity (Optional[dict[str, Any]]): Task entity. + + """ + def __init__( + self, + project_name, + folder_id, + task_id, + folder_path=None, + task_name=None, + project_entity=None, + folder_entity=None, + task_entity=None + ): + self._project_name = project_name + self._folder_id = folder_id + self._task_id = task_id + + self._folder_path = folder_path + self._task_name = task_name + + self._project_entity = project_entity + self._folder_entity = folder_entity + self._task_entity = task_entity + + def __getitem__(self, key): + warnings.warn( + ( + "Using deprecated access to selection data. Please use" + " attributes and methods" + " defined by 'LauncherActionSelection'." 
+ ), + category=DeprecationWarning + ) + if key in {"AYON_PROJECT_NAME", "AVALON_PROJECT"}: + return self.project_name + if key in {"AYON_FOLDER_PATH", "AVALON_ASSET"}: + return self.folder_path + if key in {"AYON_TASK_NAME", "AVALON_TASK"}: + return self.task_name + raise KeyError(f"Key: {key} not found") + + def __iter__(self): + for key in self.keys(): + yield key + + def __contains__(self, key): + warnings.warn( + ( + "Using deprecated access to selection data. Please use" + " attributes and methods" + " defined by 'LauncherActionSelection'." + ), + category=DeprecationWarning + ) + # Fake missing keys check for backwards compatibility + if key in { + "AYON_PROJECT_NAME", + "AVALON_PROJECT", + }: + return self._project_name is not None + if key in { + "AYON_FOLDER_PATH", + "AVALON_ASSET", + }: + return self._folder_id is not None + if key in { + "AYON_TASK_NAME", + "AVALON_TASK", + }: + return self._task_id is not None + return False + + def get(self, key, default=None): + """ + + Deprecated: + Added for backwards compatibility with older actions. + + """ + warnings.warn( + ( + "Using deprecated access to selection data. Please use" + " attributes and methods" + " defined by 'LauncherActionSelection'." + ), + category=DeprecationWarning + ) + try: + return self[key] + except KeyError: + return default + + def items(self): + """ + + Deprecated: + Added for backwards compatibility with older actions. + + """ + for key, value in ( + ("AYON_PROJECT_NAME", self.project_name), + ("AYON_FOLDER_PATH", self.folder_path), + ("AYON_TASK_NAME", self.task_name), + ): + if value is not None: + yield (key, value) + + def keys(self): + """ + + Deprecated: + Added for backwards compatibility with older actions. + + """ + for key, _ in self.items(): + yield key + + def values(self): + """ + + Deprecated: + Added for backwards compatibility with older actions. + + """ + for _, value in self.items(): + yield value + + def get_project_name(self): + """Selected project name. + + Returns: + Union[str, None]: Selected project name. + + """ + return self._project_name + + def get_folder_id(self): + """Selected folder id. + + Returns: + Union[str, None]: Selected folder id. + + """ + return self._folder_id + + def get_folder_path(self): + """Selected folder path. + + Returns: + Union[str, None]: Selected folder path. + + """ + if self._folder_id is None: + return None + if self._folder_path is None: + self._folder_path = self.folder_entity["path"] + return self._folder_path + + def get_task_id(self): + """Selected task id. + + Returns: + Union[str, None]: Selected task id. + + """ + return self._task_id + + def get_task_name(self): + """Selected task name. + + Returns: + Union[str, None]: Selected task name. + + """ + if self._task_id is None: + return None + if self._task_name is None: + self._task_name = self.task_entity["name"] + return self._task_name + + def get_project_entity(self): + """Project entity for the selection. + + Returns: + Union[dict[str, Any], None]: Project entity. + + """ + if self._project_name is None: + return None + if self._project_entity is None: + self._project_entity = ayon_api.get_project(self._project_name) + return self._project_entity + + def get_folder_entity(self): + """Folder entity for the selection. + + Returns: + Union[dict[str, Any], None]: Folder entity. 
+ + """ + if self._project_name is None or self._folder_id is None: + return None + if self._folder_entity is None: + self._folder_entity = ayon_api.get_folder_by_id( + self._project_name, self._folder_id + ) + return self._folder_entity + + def get_task_entity(self): + """Task entity for the selection. + + Returns: + Union[dict[str, Any], None]: Task entity. + + """ + if ( + self._project_name is None + or self._task_id is None + ): + return None + if self._task_entity is None: + self._task_entity = ayon_api.get_task_by_id( + self._project_name, self._task_id + ) + return self._task_entity + + @property + def is_project_selected(self): + """Return whether a project is selected. + + Returns: + bool: Whether a project is selected. + + """ + return self._project_name is not None + + @property + def is_folder_selected(self): + """Return whether a folder is selected. + + Returns: + bool: Whether a folder is selected. + + """ + return self._folder_id is not None + + @property + def is_task_selected(self): + """Return whether a task is selected. + + Returns: + bool: Whether a task is selected. + + """ + return self._task_id is not None + + project_name = property(get_project_name) + folder_id = property(get_folder_id) + task_id = property(get_task_id) + folder_path = property(get_folder_path) + task_name = property(get_task_name) + + project_entity = property(get_project_entity) + folder_entity = property(get_folder_entity) + task_entity = property(get_task_entity) + + class LauncherAction(object): """A custom action available""" name = None @@ -21,17 +307,23 @@ class LauncherAction(object): log = logging.getLogger("LauncherAction") log.propagate = True - def is_compatible(self, session): + def is_compatible(self, selection): """Return whether the class is compatible with the Session. Args: - session (dict[str, Union[str, None]]): Session data with - AYON_PROJECT_NAME, AYON_FOLDER_PATH and AYON_TASK_NAME. - """ + selection (LauncherActionSelection): Data with selection. + """ return True - def process(self, session, **kwargs): + def process(self, selection, **kwargs): + """Process the action. + + Args: + selection (LauncherActionSelection): Data with selection. + **kwargs: Additional arguments. 
+ + """ pass diff --git a/client/ayon_core/pipeline/anatomy/anatomy.py b/client/ayon_core/pipeline/anatomy/anatomy.py index 0d250116bd..2aa8eeddbc 100644 --- a/client/ayon_core/pipeline/anatomy/anatomy.py +++ b/client/ayon_core/pipeline/anatomy/anatomy.py @@ -447,7 +447,7 @@ class CacheItem: class Anatomy(BaseAnatomy): - _sync_server_addon_cache = CacheItem() + _sitesync_addon_cache = CacheItem() _project_cache = collections.defaultdict(CacheItem) _default_site_id_cache = collections.defaultdict(CacheItem) _root_overrides_cache = collections.defaultdict( @@ -482,13 +482,13 @@ class Anatomy(BaseAnatomy): return copy.deepcopy(project_cache.data) @classmethod - def get_sync_server_addon(cls): - if cls._sync_server_addon_cache.is_outdated: + def get_sitesync_addon(cls): + if cls._sitesync_addon_cache.is_outdated: manager = AddonsManager() - cls._sync_server_addon_cache.update_data( - manager.get_enabled_addon("sync_server") + cls._sitesync_addon_cache.update_data( + manager.get_enabled_addon("sitesync") ) - return cls._sync_server_addon_cache.data + return cls._sitesync_addon_cache.data @classmethod def _get_studio_roots_overrides(cls, project_name): @@ -525,8 +525,8 @@ class Anatomy(BaseAnatomy): """ # First check if sync server is available and enabled - sync_server = cls.get_sync_server_addon() - if sync_server is None or not sync_server.enabled: + sitesync_addon = cls.get_sitesync_addon() + if sitesync_addon is None or not sitesync_addon.enabled: # QUESTION is ok to force 'studio' when site sync is not enabled? site_name = "studio" @@ -535,7 +535,7 @@ class Anatomy(BaseAnatomy): project_cache = cls._default_site_id_cache[project_name] if project_cache.is_outdated: project_cache.update_data( - sync_server.get_active_site_type(project_name) + sitesync_addon.get_active_site_type(project_name) ) site_name = project_cache.data @@ -549,7 +549,7 @@ class Anatomy(BaseAnatomy): ) else: # Ask sync server to get roots overrides - roots_overrides = sync_server.get_site_root_overrides( + roots_overrides = sitesync_addon.get_site_root_overrides( project_name, site_name ) site_cache.update_data(roots_overrides) diff --git a/client/ayon_core/pipeline/anatomy/templates.py b/client/ayon_core/pipeline/anatomy/templates.py index 46cad385f0..d89b70719e 100644 --- a/client/ayon_core/pipeline/anatomy/templates.py +++ b/client/ayon_core/pipeline/anatomy/templates.py @@ -14,7 +14,6 @@ from .exceptions import ( TemplateMissingKey, AnatomyTemplateUnsolved, ) -from .roots import RootItem _PLACEHOLDER = object() diff --git a/client/ayon_core/pipeline/context_tools.py b/client/ayon_core/pipeline/context_tools.py index 84a17be8f2..33567d7280 100644 --- a/client/ayon_core/pipeline/context_tools.py +++ b/client/ayon_core/pipeline/context_tools.py @@ -1,7 +1,6 @@ """Core pipeline functionality""" import os -import types import logging import platform import uuid @@ -21,7 +20,6 @@ from .anatomy import Anatomy from .template_data import get_template_data_with_names from .workfile import ( get_workdir, - get_workfile_template_key, get_custom_workfile_template_by_string_context, ) from . import ( @@ -97,8 +95,8 @@ def install_host(host): """Install `host` into the running Python session. Args: - host (module): A Python module containing the Avalon - avalon host-interface. + host (HostBase): A host interface object. + """ global _is_installed @@ -154,6 +152,13 @@ def install_host(host): def install_ayon_plugins(project_name=None, host_name=None): + """Install AYON core plugins and make sure the core is initialized. 
+ + Args: + project_name (Optional[str]): Name of project to install plugins for. + host_name (Optional[str]): Name of host to install plugins for. + + """ # Make sure global AYON connection has set site id and version # - this is necessary if 'install_host' is not called initialize_ayon_connection() @@ -223,6 +228,12 @@ def install_ayon_plugins(project_name=None, host_name=None): def install_openpype_plugins(project_name=None, host_name=None): + """Install AYON core plugins and make sure the core is initialized. + + Deprecated: + Use `install_ayon_plugins` instead. + + """ install_ayon_plugins(project_name, host_name) @@ -281,47 +292,6 @@ def deregister_host(): _registered_host["_"] = None -def debug_host(): - """A debug host, useful to debugging features that depend on a host""" - - host = types.ModuleType("debugHost") - - def ls(): - containers = [ - { - "representation": "ee-ft-a-uuid1", - "schema": "openpype:container-1.0", - "name": "Bruce01", - "objectName": "Bruce01_node", - "namespace": "_bruce01_", - "version": 3, - }, - { - "representation": "aa-bc-s-uuid2", - "schema": "openpype:container-1.0", - "name": "Bruce02", - "objectName": "Bruce01_node", - "namespace": "_bruce02_", - "version": 2, - } - ] - - for container in containers: - yield container - - host.__dict__.update({ - "ls": ls, - "open_file": lambda fname: None, - "save_file": lambda fname: None, - "current_file": lambda: os.path.expanduser("~/temp.txt"), - "has_unsaved_changes": lambda: False, - "work_root": lambda: os.path.expanduser("~/temp"), - "file_extensions": lambda: ["txt"], - }) - - return host - - def get_current_host_name(): """Current host name. @@ -347,7 +317,8 @@ def get_global_context(): Use 'get_current_context' to make sure you'll get current host integration context info. - Example: + Example:: + { "project_name": "Commercial", "folder_path": "Bunny", @@ -411,42 +382,67 @@ def get_current_project_entity(fields=None): return ayon_api.get_project(project_name, fields=fields) -def get_current_project_folder(folder_path=None, folder_id=None, fields=None): +def get_current_folder_entity(fields=None): """Helper function to get folder entity based on current context. This function should be called only in process where host is installed. - Folder is found out based on passed folder path or id (not both). Folder - path is not used for filtering if folder id is passed. When both - folder path and id are missing then current folder path is used. + Folder is based on current context project name and folder path. Args: - folder_path (Union[str, None]): Folder path used for filter. - folder_id (Union[str, None]): Folder id. If entered then - is used as only filter. fields (Optional[Iterable[str]]): Limit returned data of folder entity to specific keys. Returns: - Union[dict[str, Any], None]: Fodler entity or None. + Union[dict[str, Any], None]: Folder entity or None. 
+ """ + context = get_current_context() + project_name = context["project_name"] + folder_path = context["folder_path"] - project_name = get_current_project_name() - if folder_id: - return ayon_api.get_folder_by_id( - project_name, folder_id, fields=fields - ) - - if not folder_path: - folder_path = get_current_folder_path() - # Skip if is not set even on context - if not folder_path: - return None + # Skip if is not set even on context + if not project_name or not folder_path: + return None return ayon_api.get_folder_by_path( project_name, folder_path, fields=fields ) +def get_current_task_entity(fields=None): + """Helper function to get task entity based on current context. + + This function should be called only in process where host is installed. + + Task is based on current context project name, folder path + and task name. + + Args: + fields (Optional[Iterable[str]]): Limit returned data of task entity + to specific keys. + + Returns: + Union[dict[str, Any], None]: Task entity or None. + + """ + context = get_current_context() + project_name = context["project_name"] + folder_path = context["folder_path"] + task_name = context["task_name"] + + # Skip if is not set even on context + if not project_name or not folder_path or not task_name: + return None + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"id"} + ) + if not folder_entity: + return None + return ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name, fields=fields + ) + + def is_representation_from_latest(representation): """Return whether the representation is from latest version @@ -515,88 +511,13 @@ def get_current_context_template_data(settings=None): ) -def get_workdir_from_session(session=None, template_key=None): - """Template data for template fill from session keys. - - Args: - session (Union[Dict[str, str], None]): The Session to use. If not - provided use the currently active global Session. - template_key (str): Prepared template key from which workdir is - calculated. - - Returns: - str: Workdir path. - """ - - if session is not None: - project_name = session["AYON_PROJECT_NAME"] - host_name = session["AYON_HOST_NAME"] - else: - project_name = get_current_project_name() - host_name = get_current_host_name() - template_data = get_template_data_from_session(session) - - if not template_key: - task_type = template_data["task"]["type"] - template_key = get_workfile_template_key( - project_name, - task_type, - host_name, - ) - - anatomy = Anatomy(project_name) - template_obj = anatomy.get_template_item("work", template_key, "directory") - path = template_obj.format_strict(template_data) - if path: - path = os.path.normpath(path) - return path - - -def get_custom_workfile_template_from_session( - session=None, project_settings=None -): - """Filter and fill workfile template profiles by current context. - - This function cab be used only inside host where context is set. - - Args: - session (Optional[Dict[str, str]]): Session from which are taken - data. - project_settings(Optional[Dict[str, Any]]): Project settings. - - Returns: - str: Path to template or None if none of profiles match current - context. (Existence of formatted path is not validated.) 
- """ - - if session is not None: - project_name = session["AYON_PROJECT_NAME"] - folder_path = session["AYON_FOLDER_PATH"] - task_name = session["AYON_TASK_NAME"] - host_name = session["AYON_HOST_NAME"] - else: - context = get_current_context() - project_name = context["project_name"] - folder_path = context["folder_path"] - task_name = context["task_name"] - host_name = get_current_host_name() - - return get_custom_workfile_template_by_string_context( - project_name, - folder_path, - task_name, - host_name, - project_settings=project_settings - ) - - def get_current_context_custom_workfile_template(project_settings=None): """Filter and fill workfile template profiles by current context. - This function can be used only inside host where context is set. + This function can be used only inside host where current context is set. Args: - project_settings(Optional[Dict[str, Any]]): Project settings. + project_settings (Optional[dict[str, Any]]): Project settings Returns: str: Path to template or None if none of profiles match current diff --git a/client/ayon_core/pipeline/create/README.md b/client/ayon_core/pipeline/create/README.md index bbfd1bfa0f..09d3a22222 100644 --- a/client/ayon_core/pipeline/create/README.md +++ b/client/ayon_core/pipeline/create/README.md @@ -8,7 +8,7 @@ Discovers Creator plugins to be able create new instances and convert existing i Publish plugins are loaded because they can also define attributes definitions. These are less product type specific To be able define attributes Publish plugin must inherit from `AYONPyblishPluginMixin` and must override `get_attribute_defs` class method which must return list of attribute definitions. Values of publish plugin definitions are stored per plugin name under `publish_attributes`. Also can override `convert_attribute_values` class method which gives ability to modify values on instance before are used in CreatedInstance. Method `convert_attribute_values` can be also used without `get_attribute_defs` to modify values when changing compatibility (remove metadata from instance because are irrelevant). -Possible attribute definitions can be found in `openpype/pipeline/lib/attribute_definitions.py`. +Possible attribute definitions can be found in `ayon_core/lib/attribute_definitions.py`. Except creating and removing instances are all changes not automatically propagated to host context (scene/workfile/...) to propagate changes call `save_changes` which trigger update of all instances in context using Creators implementation. diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index ca9896fb3f..b8618738fb 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -1790,10 +1790,10 @@ class CreateContext: creator_identifier = creator_class.identifier if creator_identifier in creators: - self.log.warning(( - "Duplicated Creator identifier. " - "Using first and skipping following" - )) + self.log.warning( + "Duplicate Creator identifier: '%s'. 
Using first Creator " + "and skipping: %s", creator_identifier, creator_class + ) continue # Filter by host name diff --git a/client/ayon_core/pipeline/farm/pyblish_functions.py b/client/ayon_core/pipeline/farm/pyblish_functions.py index dadf2cbe1a..eb6f8569d9 100644 --- a/client/ayon_core/pipeline/farm/pyblish_functions.py +++ b/client/ayon_core/pipeline/farm/pyblish_functions.py @@ -6,13 +6,11 @@ from copy import deepcopy import attr import ayon_api -import pyblish.api import clique from ayon_core.pipeline import ( get_current_project_name, get_representation_path, - Anatomy, ) from ayon_core.lib import Logger from ayon_core.pipeline.publish import KnownPublishError @@ -137,7 +135,7 @@ def get_transferable_representations(instance): list of dicts: List of transferable representations. """ - anatomy = instance.context.data["anatomy"] # type: Anatomy + anatomy = instance.context.data["anatomy"] to_transfer = [] for representation in instance.data.get("representations", []): @@ -166,7 +164,6 @@ def get_transferable_representations(instance): def create_skeleton_instance( instance, families_transfer=None, instance_transfer=None): - # type: (pyblish.api.Instance, list, dict) -> dict """Create skeleton instance from original instance data. This will create dictionary containing skeleton @@ -191,7 +188,7 @@ def create_skeleton_instance( context = instance.context data = instance.data.copy() - anatomy = instance.context.data["anatomy"] # type: Anatomy + anatomy = instance.context.data["anatomy"] # get time related data from instance (or context) time_data = get_time_data_from_instance_or_context(instance) @@ -620,15 +617,32 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data, aov_patterns = aov_filter preview = match_aov_pattern(app, aov_patterns, render_file_name) - # toggle preview on if multipart is on - if instance.data.get("multipartExr"): - log.debug("Adding preview tag because its multipartExr") - preview = True new_instance = deepcopy(skeleton) new_instance["productName"] = product_name new_instance["productGroup"] = group_name + # toggle preview on if multipart is on + # Because we cant query the multipartExr data member of each AOV we'll + # need to have hardcoded rule of excluding any renders with + # "cryptomatte" in the file name from being a multipart EXR. This issue + # happens with Redshift that forces Cryptomatte renders to be separate + # files even when the rest of the AOVs are merged into a single EXR. + # There might be an edge case where the main instance has cryptomatte + # in the name even though it's a multipart EXR. + if instance.data.get("renderer") == "redshift": + if ( + instance.data.get("multipartExr") and + "cryptomatte" not in render_file_name.lower() + ): + log.debug("Adding preview tag because it's multipartExr") + preview = True + else: + new_instance["multipartExr"] = False + elif instance.data.get("multipartExr"): + log.debug("Adding preview tag because its multipartExr") + preview = True + # explicitly disable review by user preview = preview and not do_not_add_review if preview: @@ -751,7 +765,6 @@ def get_resources(project_name, version_entity, extension=None): def create_skeleton_instance_cache(instance): - # type: (pyblish.api.Instance, list, dict) -> dict """Create skeleton instance from original instance data. 
This will create dictionary containing skeleton @@ -771,7 +784,7 @@ def create_skeleton_instance_cache(instance): context = instance.context data = instance.data.copy() - anatomy = instance.context.data["anatomy"] # type: Anatomy + anatomy = instance.context.data["anatomy"] # get time related data from instance (or context) time_data = get_time_data_from_instance_or_context(instance) @@ -1005,7 +1018,7 @@ def copy_extend_frames(instance, representation): start = instance.data.get("frameStart") end = instance.data.get("frameEnd") project_name = instance.context.data["project"] - anatomy = instance.context.data["anatomy"] # type: Anatomy + anatomy = instance.context.data["anatomy"] folder_entity = ayon_api.get_folder_by_path( project_name, instance.data.get("folderPath") diff --git a/client/ayon_core/pipeline/legacy_io.py b/client/ayon_core/pipeline/legacy_io.py deleted file mode 100644 index d5b555845b..0000000000 --- a/client/ayon_core/pipeline/legacy_io.py +++ /dev/null @@ -1,36 +0,0 @@ -import logging -from ayon_core.pipeline import get_current_project_name - -Session = {} - -log = logging.getLogger(__name__) -log.warning( - "DEPRECATION WARNING: 'legacy_io' is deprecated and will be removed in" - " future versions of ayon-core addon." - "\nReading from Session won't give you updated information and changing" - " values won't affect global state of a process." -) - - -def session_data_from_environment(context_keys=False): - return {} - - -def is_installed(): - return False - - -def install(): - pass - - -def uninstall(): - pass - - -def active_project(*args, **kwargs): - return get_current_project_name() - - -def current_project(*args, **kwargs): - return get_current_project_name() diff --git a/client/ayon_core/pipeline/load/plugins.py b/client/ayon_core/pipeline/load/plugins.py index 064af4ddc1..2475800cbb 100644 --- a/client/ayon_core/pipeline/load/plugins.py +++ b/client/ayon_core/pipeline/load/plugins.py @@ -24,7 +24,7 @@ class LoaderPlugin(list): """ product_types = set() - representations = [] + representations = set() extensions = {"*"} order = 0 is_multiple_contexts_compatible = False @@ -162,6 +162,15 @@ class LoaderPlugin(list): @classmethod def get_representations(cls): + """Representation names with which is plugin compatible. + + Empty set makes the plugin incompatible with any representation. To + allow compatibility with all representations use '{"*"}'. + + Returns: + set[str]: Names with which is plugin compatible. 
+ + """ return cls.representations @classmethod diff --git a/client/ayon_core/pipeline/publish/abstract_collect_render.py b/client/ayon_core/pipeline/publish/abstract_collect_render.py index 745632ca0a..c50dc16380 100644 --- a/client/ayon_core/pipeline/publish/abstract_collect_render.py +++ b/client/ayon_core/pipeline/publish/abstract_collect_render.py @@ -81,6 +81,9 @@ class RenderInstance(object): outputDir = attr.ib(default=None) context = attr.ib(default=None) + # The source instance the data of this render instance should merge into + source_instance = attr.ib(default=None, type=pyblish.api.Instance) + @frameStart.validator def check_frame_start(self, _, value): """Validate if frame start is not larger then end.""" @@ -214,8 +217,11 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): data = self.add_additional_data(data) render_instance_dict = attr.asdict(render_instance) - instance = context.create_instance(render_instance.name) - instance.data["label"] = render_instance.label + # Merge into source instance if provided, otherwise create instance + instance = render_instance_dict.pop("source_instance", None) + if instance is None: + instance = context.create_instance(render_instance.name) + instance.data.update(render_instance_dict) instance.data.update(data) diff --git a/client/ayon_core/pipeline/schema/__init__.py b/client/ayon_core/pipeline/schema/__init__.py index 67cf120b59..db98a6d080 100644 --- a/client/ayon_core/pipeline/schema/__init__.py +++ b/client/ayon_core/pipeline/schema/__init__.py @@ -13,7 +13,6 @@ Resources: """ import os -import re import json import logging diff --git a/client/ayon_core/plugins/actions/open_file_explorer.py b/client/ayon_core/plugins/actions/open_file_explorer.py index 69375a7859..50a3107444 100644 --- a/client/ayon_core/plugins/actions/open_file_explorer.py +++ b/client/ayon_core/plugins/actions/open_file_explorer.py @@ -3,8 +3,6 @@ import platform import subprocess from string import Formatter -import ayon_api - from ayon_core.pipeline import ( Anatomy, LauncherAction, @@ -18,18 +16,14 @@ class OpenTaskPath(LauncherAction): icon = "folder-open" order = 500 - def is_compatible(self, session): + def is_compatible(self, selection): """Return whether the action is compatible with the session""" - return bool(session.get("AYON_FOLDER_PATH")) + return selection.is_folder_selected - def process(self, session, **kwargs): + def process(self, selection, **kwargs): from qtpy import QtCore, QtWidgets - project_name = session["AYON_PROJECT_NAME"] - folder_path = session["AYON_FOLDER_PATH"] - task_name = session.get("AYON_TASK_NAME", None) - - path = self._get_workdir(project_name, folder_path, task_name) + path = self._get_workdir(selection) if not path: return @@ -60,16 +54,17 @@ class OpenTaskPath(LauncherAction): path = path.split(field, 1)[0] return path - def _get_workdir(self, project_name, folder_path, task_name): - project_entity = ayon_api.get_project(project_name) - folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) - task_entity = ayon_api.get_task_by_name( - project_name, folder_entity["id"], task_name + def _get_workdir(self, selection): + data = get_template_data( + selection.project_entity, + selection.folder_entity, + selection.task_entity ) - data = get_template_data(project_entity, folder_entity, task_entity) - - anatomy = Anatomy(project_name) + anatomy = Anatomy( + selection.project_name, + project_entity=selection.project_entity + ) workdir = anatomy.get_template_item( "work", "default", "folder" ).format(data) 
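Note: with the built-in action above ported, launcher actions no longer receive the environment-variable session dict; `is_compatible` and `process` get a `LauncherActionSelection` (defined earlier in `pipeline/actions.py`) and can rely on its lazy entity lookups. A minimal sketch of a custom action written against the new API — the action name and the print are purely illustrative:

```python
from ayon_core.pipeline import LauncherAction


class PrintSelectedTask(LauncherAction):
    """Hypothetical action, shown only to illustrate the selection API."""

    name = "print_selected_task"
    label = "Print Selected Task"
    order = 900

    def is_compatible(self, selection):
        # Boolean helpers replace checking "AYON_*" keys on a session dict.
        return selection.is_task_selected

    def process(self, selection, **kwargs):
        # 'folder_entity'/'task_entity' are fetched lazily via ayon_api on
        # first access and cached on the selection object.
        print(
            selection.project_name,
            selection.folder_path,
            selection.task_entity["name"],
        )
```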
diff --git a/client/ayon_core/plugins/load/copy_file.py b/client/ayon_core/plugins/load/copy_file.py index 5e6daa866b..08dad03be3 100644 --- a/client/ayon_core/plugins/load/copy_file.py +++ b/client/ayon_core/plugins/load/copy_file.py @@ -5,7 +5,7 @@ from ayon_core.pipeline import load class CopyFile(load.LoaderPlugin): """Copy the published file to be pasted at the desired location""" - representations = ["*"] + representations = {"*"} product_types = {"*"} label = "Copy File" diff --git a/client/ayon_core/plugins/load/copy_file_path.py b/client/ayon_core/plugins/load/copy_file_path.py index ecde3bc55f..fdf31b5e02 100644 --- a/client/ayon_core/plugins/load/copy_file_path.py +++ b/client/ayon_core/plugins/load/copy_file_path.py @@ -5,7 +5,7 @@ from ayon_core.pipeline import load class CopyFilePath(load.LoaderPlugin): """Copy published file path to clipboard""" - representations = ["*"] + representations = {"*"} product_types = {"*"} label = "Copy File Path" diff --git a/client/ayon_core/plugins/load/delete_old_versions.py b/client/ayon_core/plugins/load/delete_old_versions.py index 04873d8b5c..8e04fd9827 100644 --- a/client/ayon_core/plugins/load/delete_old_versions.py +++ b/client/ayon_core/plugins/load/delete_old_versions.py @@ -26,7 +26,7 @@ # is_multiple_contexts_compatible = True # sequence_splitter = "__sequence_splitter__" # -# representations = ["*"] +# representations = {"*"} # product_types = {"*"} # tool_names = ["library_loader"] # diff --git a/client/ayon_core/plugins/load/delivery.py b/client/ayon_core/plugins/load/delivery.py index f8184c8567..c7954a18b2 100644 --- a/client/ayon_core/plugins/load/delivery.py +++ b/client/ayon_core/plugins/load/delivery.py @@ -28,7 +28,7 @@ class Delivery(load.ProductLoaderPlugin): is_multiple_contexts_compatible = True sequence_splitter = "__sequence_splitter__" - representations = ["*"] + representations = {"*"} product_types = {"*"} tool_names = ["library_loader"] @@ -91,9 +91,15 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): longest_key = max(self.templates.keys(), key=len) dropdown.setMinimumContentsLength(len(longest_key)) - template_label = QtWidgets.QLabel() - template_label.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor)) - template_label.setTextInteractionFlags(QtCore.Qt.TextSelectableByMouse) + template_dir_label = QtWidgets.QLabel() + template_dir_label.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor)) + template_dir_label.setTextInteractionFlags( + QtCore.Qt.TextSelectableByMouse) + + template_file_label = QtWidgets.QLabel() + template_file_label.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor)) + template_file_label.setTextInteractionFlags( + QtCore.Qt.TextSelectableByMouse) renumber_frame = QtWidgets.QCheckBox() @@ -123,7 +129,8 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): input_layout.addRow("Selected representations", selected_label) input_layout.addRow("Delivery template", dropdown) - input_layout.addRow("Template value", template_label) + input_layout.addRow("Directory template", template_dir_label) + input_layout.addRow("File template", template_file_label) input_layout.addRow("Renumber Frame", renumber_frame) input_layout.addRow("Renumber start frame", first_frame_start) input_layout.addRow("Root", root_line_edit) @@ -151,7 +158,8 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): layout.addWidget(text_area) self.selected_label = selected_label - self.template_label = template_label + self.template_dir_label = template_dir_label + self.template_file_label = template_file_label self.dropdown = dropdown 
self.first_frame_start = first_frame_start self.renumber_frame = renumber_frame @@ -282,11 +290,13 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): """Adds list of delivery templates from Anatomy to dropdown.""" templates = {} for template_name, value in anatomy.templates["delivery"].items(): - path_template = value["path"] - if ( - not isinstance(path_template, str) - or not path_template.startswith('{root') - ): + directory_template = value["directory"] + if not directory_template.startswith("{root"): + self.log.warning( + "Skipping template '%s' because directory template does " + "not start with `{root` in value: %s", + template_name, directory_template + ) continue templates[template_name] = value @@ -350,7 +360,8 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): name = self.dropdown.currentText() template_value = self.templates.get(name) if template_value: - self.template_label.setText(template_value) + self.template_dir_label.setText(template_value["directory"]) + self.template_file_label.setText(template_value["file"]) self.btn_delivery.setEnabled(bool(self._get_selected_repres())) def _update_progress(self, uploaded): diff --git a/client/ayon_core/plugins/load/open_djv.py b/client/ayon_core/plugins/load/open_djv.py deleted file mode 100644 index 30023ac1f5..0000000000 --- a/client/ayon_core/plugins/load/open_djv.py +++ /dev/null @@ -1,64 +0,0 @@ -import os -from ayon_core.lib import ApplicationManager -from ayon_core.pipeline import load - - -def existing_djv_path(): - app_manager = ApplicationManager() - djv_list = [] - - for app_name, app in app_manager.applications.items(): - if 'djv' in app_name and app.find_executable(): - djv_list.append(app_name) - - return djv_list - - -class OpenInDJV(load.LoaderPlugin): - """Open Image Sequence with system default""" - - djv_list = existing_djv_path() - product_types = {"*"} if djv_list else [] - representations = ["*"] - extensions = { - "cin", "dpx", "avi", "dv", "gif", "flv", "mkv", "mov", "mpg", "mpeg", - "mp4", "m4v", "mxf", "iff", "z", "ifl", "jpeg", "jpg", "jfif", "lut", - "1dl", "exr", "pic", "png", "ppm", "pnm", "pgm", "pbm", "rla", "rpf", - "sgi", "rgba", "rgb", "bw", "tga", "tiff", "tif", "img", "h264", - } - - label = "Open in DJV" - order = -10 - icon = "play-circle" - color = "orange" - - def load(self, context, name, namespace, data): - import clique - - path = self.filepath_from_context(context) - directory = os.path.dirname(path) - - pattern = clique.PATTERNS["frames"] - files = os.listdir(directory) - collections, remainder = clique.assemble( - files, - patterns=[pattern], - minimum_items=1 - ) - - if not remainder: - sequence = collections[0] - first_image = list(sequence)[0] - else: - first_image = path - filepath = os.path.normpath(os.path.join(directory, first_image)) - - self.log.info("Opening : {}".format(filepath)) - - last_djv_version = sorted(self.djv_list)[-1] - - app_manager = ApplicationManager() - djv = app_manager.applications.get(last_djv_version) - djv.arguments.append(filepath) - - app_manager.launch(last_djv_version) diff --git a/client/ayon_core/plugins/load/open_file.py b/client/ayon_core/plugins/load/open_file.py index fc57708cd6..3b5fbbc0c9 100644 --- a/client/ayon_core/plugins/load/open_file.py +++ b/client/ayon_core/plugins/load/open_file.py @@ -19,7 +19,7 @@ class OpenFile(load.LoaderPlugin): """Open Image Sequence or Video with system default""" product_types = {"render2d"} - representations = ["*"] + representations = {"*"} label = "Open" order = -10 diff --git 
a/client/ayon_core/plugins/load/push_to_library.py b/client/ayon_core/plugins/load/push_to_library.py index 02d834bc95..981028d734 100644 --- a/client/ayon_core/plugins/load/push_to_library.py +++ b/client/ayon_core/plugins/load/push_to_library.py @@ -11,7 +11,7 @@ class PushToLibraryProject(load.ProductLoaderPlugin): is_multiple_contexts_compatible = True - representations = ["*"] + representations = {"*"} product_types = {"*"} label = "Push to Library project" diff --git a/client/ayon_core/plugins/publish/collect_host_name.py b/client/ayon_core/plugins/publish/collect_host_name.py index e76579bbd2..ea4ec7ad41 100644 --- a/client/ayon_core/plugins/publish/collect_host_name.py +++ b/client/ayon_core/plugins/publish/collect_host_name.py @@ -1,14 +1,13 @@ """ Requires: None + Provides: - context -> host (str) + context -> hostName (str) """ import os import pyblish.api -from ayon_core.lib import ApplicationManager - class CollectHostName(pyblish.api.ContextPlugin): """Collect avalon host name to context.""" @@ -18,30 +17,8 @@ class CollectHostName(pyblish.api.ContextPlugin): def process(self, context): host_name = context.data.get("hostName") - app_name = context.data.get("appName") - app_label = context.data.get("appLabel") - # Don't override value if is already set - if host_name and app_name and app_label: + if host_name: return # Use AYON_HOST_NAME to get host name if available - if not host_name: - host_name = os.environ.get("AYON_HOST_NAME") - - # Use AYON_APP_NAME to get full app name - if not app_name: - app_name = os.environ.get("AYON_APP_NAME") - - # Fill missing values based on app full name - if (not host_name or not app_label) and app_name: - app_manager = ApplicationManager() - app = app_manager.applications.get(app_name) - if app: - if not host_name: - host_name = app.host_name - if not app_label: - app_label = app.full_label - - context.data["hostName"] = host_name - context.data["appName"] = app_name - context.data["appLabel"] = app_label + context.data["hostName"] = os.environ.get("AYON_HOST_NAME") diff --git a/client/ayon_core/plugins/publish/collect_settings.py b/client/ayon_core/plugins/publish/collect_settings.py index 66b89a114c..db58e7eaa9 100644 --- a/client/ayon_core/plugins/publish/collect_settings.py +++ b/client/ayon_core/plugins/publish/collect_settings.py @@ -1,5 +1,5 @@ from pyblish import api -from ayon_core.settings import get_current_project_settings +from ayon_core.settings import get_project_settings class CollectSettings(api.ContextPlugin): @@ -9,4 +9,9 @@ class CollectSettings(api.ContextPlugin): label = "Collect Settings" def process(self, context): - context.data["project_settings"] = get_current_project_settings() + project_name = context.data["projectName"] + self.log.debug( + "Collecting settings for project: {}".format(project_name) + ) + project_settings = get_project_settings(project_name) + context.data["project_settings"] = project_settings diff --git a/client/ayon_core/plugins/publish/extract_burnin.py b/client/ayon_core/plugins/publish/extract_burnin.py index ab6353a29f..93774842ca 100644 --- a/client/ayon_core/plugins/publish/extract_burnin.py +++ b/client/ayon_core/plugins/publish/extract_burnin.py @@ -27,7 +27,7 @@ class ExtractBurnin(publish.Extractor): Extractor to create video with pre-defined burnins from existing extracted video representation. 
- It will work only on represenations having `burnin = True` or + It will work only on representations having `burnin = True` or `tags` including `burnin` """ @@ -125,7 +125,7 @@ class ExtractBurnin(publish.Extractor): burnin_defs = copy.deepcopy(src_burnin_defs) - # Filter output definition by `burnin` represetation key + # Filter output definition by `burnin` representation key repre_linked_burnins = [ burnin_def for burnin_def in burnin_defs @@ -194,6 +194,16 @@ class ExtractBurnin(publish.Extractor): ).format(host_name, product_type, task_name, profile)) return + burnins_per_repres = self._get_burnins_per_representations( + instance, burnin_defs + ) + if not burnins_per_repres: + self.log.debug( + "Skipped instance. No representations found matching a burnin" + "definition in: %s", burnin_defs + ) + return + burnin_options = self._get_burnin_options() # Prepare basic data for processing @@ -204,9 +214,6 @@ class ExtractBurnin(publish.Extractor): # Args that will execute the script executable_args = ["run", scriptpath] - burnins_per_repres = self._get_burnins_per_representations( - instance, burnin_defs - ) for repre, repre_burnin_defs in burnins_per_repres: # Create copy of `_burnin_data` and `_temp_data` for repre. burnin_data = copy.deepcopy(_burnin_data) @@ -371,6 +378,7 @@ class ExtractBurnin(publish.Extractor): # Prepare subprocess arguments args = list(executable_args) args.append(temporary_json_filepath) + args.append("--headless") self.log.debug("Executing: {}".format(" ".join(args))) # Run burnin script @@ -540,7 +548,7 @@ class ExtractBurnin(publish.Extractor): return burnin_data, temp_data def repres_is_valid(self, repre): - """Validation if representaion should be processed. + """Validation if representation should be processed. Args: repre (dict): Representation which should be checked. @@ -572,7 +580,7 @@ class ExtractBurnin(publish.Extractor): tags (list): Tags of processed representation. Returns: - list: Containg all burnin definitions matching entered tags. + list: Contain all burnin definitions matching entered tags. """ filtered_burnins = [] @@ -597,7 +605,7 @@ class ExtractBurnin(publish.Extractor): Store data to `temp_data` for keys "full_input_path" which is full path to source files optionally with sequence formatting, - "full_output_path" full path to otput with optionally with sequence + "full_output_path" full path to output with optionally with sequence formatting, "full_input_paths" list of all source files which will be deleted when burnin script ends, "repre_files" list of output filenames. @@ -747,7 +755,7 @@ class ExtractBurnin(publish.Extractor): profile (dict): Profile from presets matching current context. Returns: - list: Containg all valid output definitions. + list: Contain all valid output definitions. """ filtered_burnin_defs = [] @@ -768,7 +776,7 @@ class ExtractBurnin(publish.Extractor): ): self.log.debug(( "Skipped burnin definition \"{}\". 
Family" - " fiters ({}) does not match current instance families: {}" + " filters ({}) does not match current instance families: {}" ).format( filename_suffix, str(families_filters), str(families) )) diff --git a/client/ayon_core/plugins/publish/extract_review.py b/client/ayon_core/plugins/publish/extract_review.py index 905158c851..1891c25521 100644 --- a/client/ayon_core/plugins/publish/extract_review.py +++ b/client/ayon_core/plugins/publish/extract_review.py @@ -32,6 +32,35 @@ from ayon_core.pipeline.publish import ( from ayon_core.pipeline.publish.lib import add_repre_files_for_cleanup +def frame_to_timecode(frame: int, fps: float) -> str: + """Convert a frame number and FPS to editorial timecode (HH:MM:SS:FF). + + Unlike `ayon_core.pipeline.editorial.frames_to_timecode` this does not + rely on the `opentimelineio` package, so it can be used across hosts that + do not have it available. + + Args: + frame (int): The frame number to be converted. + fps (float): The frames per second of the video. + + Returns: + str: The timecode in HH:MM:SS:FF format. + """ + # Calculate total seconds + total_seconds = frame / fps + + # Extract hours, minutes, and seconds + hours = int(total_seconds // 3600) + minutes = int((total_seconds % 3600) // 60) + seconds = int(total_seconds % 60) + + # Adjust for non-integer FPS by rounding the remaining frames appropriately + remaining_frames = round((total_seconds - int(total_seconds)) * fps) + + # Format and return the timecode + return f"{hours:02d}:{minutes:02d}:{seconds:02d}:{remaining_frames:02d}" + + class ExtractReview(pyblish.api.InstancePlugin): """Extracting Review mov file for Ftrack @@ -390,7 +419,16 @@ class ExtractReview(pyblish.api.InstancePlugin): # add outputName to anatomy format fill_data fill_data.update({ "output": output_name, - "ext": output_ext + "ext": output_ext, + + # By adding `timecode` as data we can use it + # in the ffmpeg arguments for `--timecode` so that editorial + # like Resolve or Premiere can detect the start frame for e.g. + # review output files + "timecode": frame_to_timecode( + frame=temp_data["frame_start_handle"], + fps=float(instance.data["fps"]) + ) }) try: # temporary until oiiotool is supported cross platform @@ -619,7 +657,7 @@ class ExtractReview(pyblish.api.InstancePlugin): # Prepare input and output filepaths self.input_output_paths(new_repre, output_def, temp_data) - # Set output frames len to 1 when ouput is single image + # Set output frames len to 1 when output is single image if ( temp_data["output_ext_is_image"] and not temp_data["output_is_sequence"] @@ -955,7 +993,7 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("New representation ext: `{}`".format(output_ext)) - # Output is image file sequence witht frames + # Output is image file sequence with frames output_ext_is_image = bool(output_ext in self.image_exts) output_is_sequence = bool( output_ext_is_image @@ -967,7 +1005,7 @@ class ExtractReview(pyblish.api.InstancePlugin): frame_end = temp_data["output_frame_end"] filename_base = "{}_{}".format(filename, filename_suffix) - # Temporary tempalte for frame filling. Example output: + # Temporary template for frame filling. 
Example output: # "basename.%04d.exr" when `frame_end` == 1001 repr_file = "{}.%{:0>2}d.{}".format( filename_base, len(str(frame_end)), output_ext @@ -997,7 +1035,7 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("Creating dir: {}".format(dst_staging_dir)) os.makedirs(dst_staging_dir) - # Store stagingDir to representaion + # Store stagingDir to representation new_repre["stagingDir"] = dst_staging_dir # Store paths to temp data @@ -1225,19 +1263,13 @@ class ExtractReview(pyblish.api.InstancePlugin): filters = [] # if reformat input video file is already reforamted from upstream - reformat_in_baking = bool("reformated" in new_repre["tags"]) + reformat_in_baking = ( + "reformatted" in new_repre["tags"] + # Backwards compatibility + or "reformated" in new_repre["tags"] + ) self.log.debug("reformat_in_baking: `{}`".format(reformat_in_baking)) - # Get instance data - pixel_aspect = temp_data["pixel_aspect"] - - if reformat_in_baking: - self.log.debug(( - "Using resolution from input. It is already " - "reformated from upstream process" - )) - pixel_aspect = 1 - # NOTE Skipped using instance's resolution full_input_path_single_file = temp_data["full_input_path_single_file"] try: @@ -1268,7 +1300,7 @@ class ExtractReview(pyblish.api.InstancePlugin): if reformat_in_baking: self.log.debug(( "Using resolution from input. It is already " - "reformated from upstream process" + "reformatted from upstream process" )) pixel_aspect = 1 output_width = input_width @@ -1374,7 +1406,7 @@ class ExtractReview(pyblish.api.InstancePlugin): # Make sure output width and height is not an odd number # When this can happen: # - if output definition has set width and height with odd number - # - `instance.data` contain width and height with odd numbeer + # - `instance.data` contain width and height with odd number if output_width % 2 != 0: self.log.warning(( "Converting output width from odd to even number. {} -> {}" @@ -1555,7 +1587,7 @@ class ExtractReview(pyblish.api.InstancePlugin): custom_tags (list): Custom Tags of processed representation. Returns: - list: Containg all output definitions matching entered tags. + list: Containing all output definitions matching entered tags. 
""" filtered_outputs = [] @@ -1820,8 +1852,8 @@ class OverscanCrop: """ # crop=width:height:x:y - explicit start x, y position # crop=width:height - x, y are related to center by width/height - # pad=width:heigth:x:y - explicit start x, y position - # pad=width:heigth - x, y are set to 0 by default + # pad=width:height:x:y - explicit start x, y position + # pad=width:height - x, y are set to 0 by default width = self.width() height = self.height() @@ -1869,7 +1901,7 @@ class OverscanCrop: # Replace "px" (and spaces before) with single space string_value = re.sub(r"([ ]+)?px", " ", string_value) string_value = re.sub(r"([ ]+)%", "%", string_value) - # Make sure +/- sign at the beggining of string is next to number + # Make sure +/- sign at the beginning of string is next to number string_value = re.sub(r"^([\+\-])[ ]+", "\g<1>", string_value) # Make sure +/- sign in the middle has zero spaces before number under # which belongs diff --git a/client/ayon_core/plugins/publish/extract_slate_data.py b/client/ayon_core/plugins/publish/extract_slate_data.py new file mode 100644 index 0000000000..750fb5d60a --- /dev/null +++ b/client/ayon_core/plugins/publish/extract_slate_data.py @@ -0,0 +1,22 @@ +import pyblish.api + +from ayon_core.pipeline import publish + + +class ExtractSlateData(publish.Extractor): + """Add slate data for integration.""" + + label = "Slate Data" + # Offset from ExtractReviewSlate and ExtractGenerateSlate. + order = pyblish.api.ExtractorOrder + 0.49 + families = ["slate", "review"] + hosts = ["nuke", "shell"] + + def process(self, instance): + for representation in instance.data.get("representations", []): + if "slate-frame" not in representation.get("tags", []): + continue + + data = representation.get("data", {}) + data["slateFrames"] = 1 + representation["data"] = data diff --git a/client/ayon_core/plugins/publish/help/validate_containers.xml b/client/ayon_core/plugins/publish/help/validate_containers.xml index 5d18bb4c19..321e73a303 100644 --- a/client/ayon_core/plugins/publish/help/validate_containers.xml +++ b/client/ayon_core/plugins/publish/help/validate_containers.xml @@ -10,7 +10,7 @@ Scene contains one or more outdated loaded containers, eg. versions loaded into ### How to repair? Use 'Scene Inventory' and update all highlighted old container to latest OR - refresh Publish and switch 'Validate Containers' toggle on 'Options' tab. +refresh Publish and switch 'Validate Containers' toggle on 'Context' tab. WARNING: Skipping this validator will result in publishing (and probably rendering) old version of loaded assets. 
diff --git a/client/ayon_core/plugins/publish/integrate_hero_version.py b/client/ayon_core/plugins/publish/integrate_hero_version.py index 7969457697..8c36719b77 100644 --- a/client/ayon_core/plugins/publish/integrate_hero_version.py +++ b/client/ayon_core/plugins/publish/integrate_hero_version.py @@ -90,6 +90,9 @@ class IntegrateHeroVersion( # *but all other plugins must be sucessfully completed def process(self, instance): + if not self.is_active(instance.data): + return + self.log.debug( "--- Integration of Hero version for product `{}` begins.".format( instance.data["productName"] diff --git a/client/ayon_core/scripts/slates/slate_base/base.py b/client/ayon_core/scripts/slates/slate_base/base.py index 35ef46769c..e1648c916a 100644 --- a/client/ayon_core/scripts/slates/slate_base/base.py +++ b/client/ayon_core/scripts/slates/slate_base/base.py @@ -82,20 +82,6 @@ class BaseObj: def main_style(self): return load_default_style() - def height(self): - raise NotImplementedError( - "Attribute `height` is not implemented for <{}>".format( - self.__clas__.__name__ - ) - ) - - def width(self): - raise NotImplementedError( - "Attribute `width` is not implemented for <{}>".format( - self.__clas__.__name__ - ) - ) - def collect_data(self): return None diff --git a/client/ayon_core/settings/lib.py b/client/ayon_core/settings/lib.py index d72e4f357a..3929818d31 100644 --- a/client/ayon_core/settings/lib.py +++ b/client/ayon_core/settings/lib.py @@ -201,7 +201,7 @@ def get_current_project_settings(): Project name should be stored in environment variable `AYON_PROJECT_NAME`. This function should be used only in host context where environment variable must be set and should not happen that any part of process will - change the value of the enviornment variable. + change the value of the environment variable. """ project_name = os.environ.get("AYON_PROJECT_NAME") if not project_name: @@ -209,6 +209,3 @@ def get_current_project_settings(): "Missing context project in environemt variable `AYON_PROJECT_NAME`." 
) return get_project_settings(project_name) - - - diff --git a/client/ayon_core/tools/launcher/models/actions.py b/client/ayon_core/tools/launcher/models/actions.py index 97943e6ad7..32df600c87 100644 --- a/client/ayon_core/tools/launcher/models/actions.py +++ b/client/ayon_core/tools/launcher/models/actions.py @@ -2,9 +2,11 @@ import os from ayon_core import resources from ayon_core.lib import Logger, AYONSettingsRegistry +from ayon_core.addon import AddonsManager from ayon_core.pipeline.actions import ( discover_launcher_actions, LauncherAction, + LauncherActionSelection, ) from ayon_core.pipeline.workfile import should_use_last_workfile_on_launch @@ -69,11 +71,6 @@ class ApplicationAction(LauncherAction): project_entities = {} _log = None - required_session_keys = ( - "AYON_PROJECT_NAME", - "AYON_FOLDER_PATH", - "AYON_TASK_NAME" - ) @property def log(self): @@ -81,18 +78,16 @@ class ApplicationAction(LauncherAction): self._log = Logger.get_logger(self.__class__.__name__) return self._log - def is_compatible(self, session): - for key in self.required_session_keys: - if not session.get(key): - return False + def is_compatible(self, selection): + if not selection.is_task_selected: + return False - project_name = session["AYON_PROJECT_NAME"] - project_entity = self.project_entities[project_name] + project_entity = self.project_entities[selection.project_name] apps = project_entity["attrib"].get("applications") if not apps or self.application.full_name not in apps: return False - project_settings = self.project_settings[project_name] + project_settings = self.project_settings[selection.project_name] only_available = project_settings["applications"]["only_available"] if only_available and not self.application.find_executable(): return False @@ -112,26 +107,23 @@ class ApplicationAction(LauncherAction): dialog.setDetailedText(details) dialog.exec_() - def process(self, session, **kwargs): + def process(self, selection, **kwargs): """Process the full Application action""" - from ayon_core.lib import ( - ApplictionExecutableNotFound, + from ayon_applications import ( + ApplicationExecutableNotFound, ApplicationLaunchFailed, ) - project_name = session["AYON_PROJECT_NAME"] - folder_path = session["AYON_FOLDER_PATH"] - task_name = session["AYON_TASK_NAME"] try: self.application.launch( - project_name=project_name, - folder_path=folder_path, - task_name=task_name, + project_name=selection.project_name, + folder_path=selection.folder_path, + task_name=selection.task_name, **self.data ) - except ApplictionExecutableNotFound as exc: + except ApplicationExecutableNotFound as exc: details = exc.details msg = exc.msg log_msg = str(msg) @@ -279,6 +271,8 @@ class ActionsModel: self._launcher_tool_reg = AYONSettingsRegistry("launcher_tool") + self._addons_manager = None + @property def log(self): if self._log is None: @@ -335,11 +329,11 @@ class ActionsModel: """ not_open_workfile_actions = self._get_no_last_workfile_for_context( project_name, folder_id, task_id) - session = self._prepare_session(project_name, folder_id, task_id) + selection = self._prepare_selection(project_name, folder_id, task_id) output = [] action_items = self._get_action_items(project_name) for identifier, action in self._get_action_objects().items(): - if not action.is_compatible(session): + if not action.is_compatible(selection): continue action_item = action_items[identifier] @@ -374,7 +368,7 @@ class ActionsModel: ) def trigger_action(self, project_name, folder_id, task_id, identifier): - session = 
self._prepare_session(project_name, folder_id, task_id) + selection = self._prepare_selection(project_name, folder_id, task_id) failed = False error_message = None action_label = identifier @@ -403,7 +397,7 @@ class ActionsModel: ) action.data["start_last_workfile"] = start_last_workfile - action.process(session) + action.process(selection) except Exception as exc: self.log.warning("Action trigger failed.", exc_info=True) failed = True @@ -419,6 +413,11 @@ class ActionsModel: } ) + def _get_addons_manager(self): + if self._addons_manager is None: + self._addons_manager = AddonsManager() + return self._addons_manager + def _get_no_last_workfile_reg_data(self): try: no_workfile_reg_data = self._launcher_tool_reg.get_item( @@ -440,29 +439,8 @@ class ActionsModel: .get(task_id, {}) ) - def _prepare_session(self, project_name, folder_id, task_id): - folder_path = None - if folder_id: - folder = self._controller.get_folder_entity( - project_name, folder_id) - if folder: - folder_path = folder["path"] - - task_name = None - if task_id: - task = self._controller.get_task_entity(project_name, task_id) - if task: - task_name = task["name"] - - return { - "AYON_PROJECT_NAME": project_name, - "AYON_FOLDER_PATH": folder_path, - "AYON_TASK_NAME": task_name, - # Deprecated - kept for backwards compatibility - "AVALON_PROJECT": project_name, - "AVALON_ASSET": folder_path, - "AVALON_TASK": task_name, - } + def _prepare_selection(self, project_name, folder_id, task_id): + return LauncherActionSelection(project_name, folder_id, task_id) def _get_discovered_action_classes(self): if self._discovered_actions is None: @@ -519,19 +497,16 @@ class ActionsModel: return action_items def _get_applications_action_classes(self): - from ayon_core.lib.applications import ( - CUSTOM_LAUNCH_APP_GROUPS, - ApplicationManager, - ) - actions = [] - manager = ApplicationManager() + addons_manager = self._get_addons_manager() + applications_addon = addons_manager.get_enabled_addon("applications") + if applications_addon is None: + return actions + + manager = applications_addon.get_applications_manager() for full_name, application in manager.applications.items(): - if ( - application.group.name in CUSTOM_LAUNCH_APP_GROUPS - or not application.enabled - ): + if not application.enabled: continue action = type( diff --git a/client/ayon_core/tools/loader/abstract.py b/client/ayon_core/tools/loader/abstract.py index 33add0213b..7a7d335092 100644 --- a/client/ayon_core/tools/loader/abstract.py +++ b/client/ayon_core/tools/loader/abstract.py @@ -871,7 +871,7 @@ class FrontendLoaderController(_BaseLoaderController): # Site sync functions @abstractmethod - def is_site_sync_enabled(self, project_name=None): + def is_sitesync_enabled(self, project_name=None): """Is site sync enabled. Site sync addon can be enabled but can be disabled per project. 
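Because `_prepare_selection` above now hands a `LauncherActionSelection` to every action, older third-party actions that still treat it as the legacy session dict keep working through the deprecated mapping interface added earlier in `pipeline/actions.py`; they only gain a `DeprecationWarning`. A small illustration with made-up selection values:

```python
import warnings

from ayon_core.pipeline.actions import LauncherActionSelection

# Hypothetical selection values, passed explicitly so no server call is needed
selection = LauncherActionSelection(
    "Commercial", "folder-id-123", "task-id-456",
    folder_path="/shots/sh010", task_name="comp",
)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Legacy access patterns used by pre-existing actions
    project = selection["AYON_PROJECT_NAME"]  # "Commercial"
    task = selection.get("AVALON_TASK")       # "comp"

assert project == "Commercial" and task == "comp"
assert any(w.category is DeprecationWarning for w in caught)
```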
diff --git a/client/ayon_core/tools/loader/control.py b/client/ayon_core/tools/loader/control.py index d8562f50ca..0c9bb369c7 100644 --- a/client/ayon_core/tools/loader/control.py +++ b/client/ayon_core/tools/loader/control.py @@ -113,7 +113,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): self._products_model = ProductsModel(self) self._loader_actions_model = LoaderActionsModel(self) self._thumbnails_model = ThumbnailsModel() - self._site_sync_model = SiteSyncModel(self) + self._sitesync_model = SiteSyncModel(self) @property def log(self): @@ -149,7 +149,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): self._loader_actions_model.reset() self._projects_model.reset() self._thumbnails_model.reset() - self._site_sync_model.reset() + self._sitesync_model.reset() self._projects_model.refresh() @@ -240,7 +240,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): project_name, representation_ids) ) - action_items.extend(self._site_sync_model.get_site_sync_action_items( + action_items.extend(self._sitesync_model.get_sitesync_action_items( project_name, representation_ids) ) @@ -254,8 +254,8 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): version_ids, representation_ids ): - if self._site_sync_model.is_site_sync_action(identifier): - self._site_sync_model.trigger_action_item( + if self._sitesync_model.is_sitesync_action(identifier): + self._sitesync_model.trigger_action_item( identifier, project_name, representation_ids @@ -368,24 +368,24 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): self._loaded_products_cache.update_data(product_ids) return self._loaded_products_cache.get_data() - def is_site_sync_enabled(self, project_name=None): - return self._site_sync_model.is_site_sync_enabled(project_name) + def is_sitesync_enabled(self, project_name=None): + return self._sitesync_model.is_sitesync_enabled(project_name) def get_active_site_icon_def(self, project_name): - return self._site_sync_model.get_active_site_icon_def(project_name) + return self._sitesync_model.get_active_site_icon_def(project_name) def get_remote_site_icon_def(self, project_name): - return self._site_sync_model.get_remote_site_icon_def(project_name) + return self._sitesync_model.get_remote_site_icon_def(project_name) def get_version_sync_availability(self, project_name, version_ids): - return self._site_sync_model.get_version_sync_availability( + return self._sitesync_model.get_version_sync_availability( project_name, version_ids ) def get_representations_sync_status( self, project_name, representation_ids ): - return self._site_sync_model.get_representations_sync_status( + return self._sitesync_model.get_representations_sync_status( project_name, representation_ids ) diff --git a/client/ayon_core/tools/loader/models/__init__.py b/client/ayon_core/tools/loader/models/__init__.py index 8e640659a0..10fd3da4d3 100644 --- a/client/ayon_core/tools/loader/models/__init__.py +++ b/client/ayon_core/tools/loader/models/__init__.py @@ -1,7 +1,7 @@ from .selection import SelectionModel from .products import ProductsModel from .actions import LoaderActionsModel -from .site_sync import SiteSyncModel +from .sitesync import SiteSyncModel __all__ = ( diff --git a/client/ayon_core/tools/loader/models/site_sync.py b/client/ayon_core/tools/loader/models/sitesync.py similarity index 90% rename from client/ayon_core/tools/loader/models/site_sync.py rename to client/ayon_core/tools/loader/models/sitesync.py 
index a589cf7fbe..987510905b 100644 --- a/client/ayon_core/tools/loader/models/site_sync.py +++ b/client/ayon_core/tools/loader/models/sitesync.py @@ -36,7 +36,7 @@ class SiteSyncModel: self._controller = controller self._site_icons = None - self._site_sync_enabled_cache = NestedCacheItem( + self._sitesync_enabled_cache = NestedCacheItem( levels=1, lifetime=self.lifetime ) self._active_site_cache = NestedCacheItem( @@ -57,17 +57,17 @@ class SiteSyncModel: ) manager = AddonsManager() - self._site_sync_addon = manager.get("sync_server") + self._sitesync_addon = manager.get("sitesync") def reset(self): self._site_icons = None - self._site_sync_enabled_cache.reset() + self._sitesync_enabled_cache.reset() self._active_site_cache.reset() self._remote_site_cache.reset() self._version_availability_cache.reset() self._repre_status_cache.reset() - def is_site_sync_enabled(self, project_name=None): + def is_sitesync_enabled(self, project_name=None): """Site sync is enabled for a project. Returns false if site sync addon is not available or enabled @@ -82,13 +82,13 @@ class SiteSyncModel: bool: Site sync is enabled. """ - if not self._is_site_sync_addon_enabled(): + if not self._is_sitesync_addon_enabled(): return False - cache = self._site_sync_enabled_cache[project_name] + cache = self._sitesync_enabled_cache[project_name] if not cache.is_valid: enabled = True if project_name: - enabled = self._site_sync_addon.is_project_enabled( + enabled = self._sitesync_addon.is_project_enabled( project_name, single=True ) cache.update_data(enabled) @@ -107,8 +107,8 @@ class SiteSyncModel: cache = self._active_site_cache[project_name] if not cache.is_valid: site_name = None - if project_name and self._is_site_sync_addon_enabled(): - site_name = self._site_sync_addon.get_active_site(project_name) + if project_name and self._is_sitesync_addon_enabled(): + site_name = self._sitesync_addon.get_active_site(project_name) cache.update_data(site_name) return cache.get_data() @@ -125,8 +125,8 @@ class SiteSyncModel: cache = self._remote_site_cache[project_name] if not cache.is_valid: site_name = None - if project_name and self._is_site_sync_addon_enabled(): - site_name = self._site_sync_addon.get_remote_site(project_name) + if project_name and self._is_sitesync_addon_enabled(): + site_name = self._sitesync_addon.get_remote_site(project_name) cache.update_data(site_name) return cache.get_data() @@ -140,7 +140,7 @@ class SiteSyncModel: Union[dict[str, Any], None]: Site icon definition. """ - if not project_name or not self.is_site_sync_enabled(project_name): + if not project_name or not self.is_sitesync_enabled(project_name): return None active_site = self.get_active_site(project_name) return self._get_site_icon_def(project_name, active_site) @@ -155,14 +155,14 @@ class SiteSyncModel: Union[dict[str, Any], None]: Site icon definition. 
""" - if not project_name or not self.is_site_sync_enabled(project_name): + if not project_name or not self.is_sitesync_enabled(project_name): return None remote_site = self.get_remote_site(project_name) return self._get_site_icon_def(project_name, remote_site) def _get_site_icon_def(self, project_name, site_name): # use different icon for studio even if provider is 'local_drive' - if site_name == self._site_sync_addon.DEFAULT_SITE: + if site_name == self._sitesync_addon.DEFAULT_SITE: provider = "studio" else: provider = self._get_provider_for_site(project_name, site_name) @@ -179,7 +179,7 @@ class SiteSyncModel: dict[str, tuple[int, int]] """ - if not self.is_site_sync_enabled(project_name): + if not self.is_sitesync_enabled(project_name): return { version_id: _default_version_availability() for version_id in version_ids @@ -217,7 +217,7 @@ class SiteSyncModel: dict[str, tuple[float, float]] """ - if not self.is_site_sync_enabled(project_name): + if not self.is_sitesync_enabled(project_name): return { repre_id: _default_repre_status() for repre_id in representation_ids @@ -242,7 +242,7 @@ class SiteSyncModel: output[repre_id] = repre_cache.get_data() return output - def get_site_sync_action_items(self, project_name, representation_ids): + def get_sitesync_action_items(self, project_name, representation_ids): """ Args: @@ -253,7 +253,7 @@ class SiteSyncModel: list[ActionItem]: Actions that can be shown in loader. """ - if not self.is_site_sync_enabled(project_name): + if not self.is_sitesync_enabled(project_name): return [] repres_status = self.get_representations_sync_status( @@ -289,7 +289,7 @@ class SiteSyncModel: return action_items - def is_site_sync_action(self, identifier): + def is_sitesync_action(self, identifier): """Should be `identifier` handled by SiteSync. Args: @@ -353,22 +353,22 @@ class SiteSyncModel: ) elif identifier == REMOVE_IDENTIFIER: - self._site_sync_addon.remove_site( + self._sitesync_addon.remove_site( project_name, repre_id, active_site, remove_local_files=True ) - def _is_site_sync_addon_enabled(self): + def _is_sitesync_addon_enabled(self): """ Returns: bool: Site sync addon is enabled. """ - if self._site_sync_addon is None: + if self._sitesync_addon is None: return False - return self._site_sync_addon.enabled + return self._sitesync_addon.enabled def _get_provider_for_site(self, project_name, site_name): """Provider for a site. @@ -381,9 +381,9 @@ class SiteSyncModel: Union[str, None]: Provider name. 
""" - if not self._is_site_sync_addon_enabled(): + if not self._is_sitesync_addon_enabled(): return None - return self._site_sync_addon.get_provider_for_site( + return self._sitesync_addon.get_provider_for_site( project_name, site_name ) @@ -398,7 +398,7 @@ class SiteSyncModel: return None if self._site_icons is None: - self._site_icons = self._site_sync_addon.get_site_icons() + self._site_icons = self._sitesync_addon.get_site_icons() return self._site_icons.get(provider) def _refresh_version_availability(self, project_name, version_ids): @@ -406,7 +406,7 @@ class SiteSyncModel: return project_cache = self._version_availability_cache[project_name] - avail_by_id = self._site_sync_addon.get_version_availability( + avail_by_id = self._sitesync_addon.get_version_availability( project_name, version_ids, self.get_active_site(project_name), @@ -425,7 +425,7 @@ class SiteSyncModel: return project_cache = self._repre_status_cache[project_name] status_by_repre_id = ( - self._site_sync_addon.get_representations_sync_state( + self._sitesync_addon.get_representations_sync_state( project_name, representation_ids, self.get_active_site(project_name), @@ -496,7 +496,7 @@ class SiteSyncModel: ) def _add_site(self, project_name, repre_entity, site_name, product_type): - self._site_sync_addon.add_site( + self._sitesync_addon.add_site( project_name, repre_entity["id"], site_name, force=True ) @@ -513,7 +513,7 @@ class SiteSyncModel: try: print("Adding {} to linked representation: {}".format( site_name, link_repre_id)) - self._site_sync_addon.add_site( + self._sitesync_addon.add_site( project_name, link_repre_id, site_name, diff --git a/client/ayon_core/tools/loader/ui/products_model.py b/client/ayon_core/tools/loader/ui/products_model.py index c51172849a..b465679c3b 100644 --- a/client/ayon_core/tools/loader/ui/products_model.py +++ b/client/ayon_core/tools/loader/ui/products_model.py @@ -73,7 +73,7 @@ class ProductsModel(QtGui.QStandardItemModel): published_time_col = column_labels.index("Time") folders_label_col = column_labels.index("Folder") in_scene_col = column_labels.index("In scene") - site_sync_avail_col = column_labels.index("Availability") + sitesync_avail_col = column_labels.index("Availability") def __init__(self, controller): super(ProductsModel, self).__init__() @@ -284,7 +284,13 @@ class ProductsModel(QtGui.QStandardItemModel): model_item.setData(label, QtCore.Qt.DisplayRole) return model_item - def _set_version_data_to_product_item(self, model_item, version_item): + def _set_version_data_to_product_item( + self, + model_item, + version_item, + repre_count_by_version_id=None, + sync_availability_by_version_id=None, + ): """ Args: @@ -292,6 +298,10 @@ class ProductsModel(QtGui.QStandardItemModel): from version item. version_item (VersionItem): Item from entities model with information about version. + repre_count_by_version_id (Optional[str, int]): Mapping of + representation count by version id. + sync_availability_by_version_id (Optional[str, Tuple[int, int]]): + Mapping of sync availability by version id. 
""" model_item.setData(version_item.version_id, VERSION_ID_ROLE) @@ -312,12 +322,20 @@ class ProductsModel(QtGui.QStandardItemModel): # TODO call site sync methods for all versions at once project_name = self._last_project_name version_id = version_item.version_id - repre_count = self._controller.get_versions_representation_count( - project_name, [version_id] - )[version_id] - active, remote = self._controller.get_version_sync_availability( - project_name, [version_id] - )[version_id] + if repre_count_by_version_id is None: + repre_count_by_version_id = ( + self._controller.get_versions_representation_count( + project_name, [version_id] + ) + ) + if sync_availability_by_version_id is None: + sync_availability_by_version_id = ( + self._controller.get_version_sync_availability( + project_name, [version_id] + ) + ) + repre_count = repre_count_by_version_id[version_id] + active, remote = sync_availability_by_version_id[version_id] model_item.setData(repre_count, REPRESENTATIONS_COUNT_ROLE) model_item.setData(active, SYNC_ACTIVE_SITE_AVAILABILITY) @@ -327,7 +345,9 @@ class ProductsModel(QtGui.QStandardItemModel): self, product_item, active_site_icon, - remote_site_icon + remote_site_icon, + repre_count_by_version_id, + sync_availability_by_version_id, ): model_item = self._items_by_id.get(product_item.product_id) versions = list(product_item.version_items.values()) @@ -357,7 +377,12 @@ class ProductsModel(QtGui.QStandardItemModel): model_item.setData(active_site_icon, ACTIVE_SITE_ICON_ROLE) model_item.setData(remote_site_icon, REMOTE_SITE_ICON_ROLE) - self._set_version_data_to_product_item(model_item, last_version) + self._set_version_data_to_product_item( + model_item, + last_version, + repre_count_by_version_id, + sync_availability_by_version_id, + ) return model_item def get_last_project_name(self): @@ -387,6 +412,24 @@ class ProductsModel(QtGui.QStandardItemModel): product_item.product_id: product_item for product_item in product_items } + last_version_id_by_product_id = {} + for product_item in product_items: + versions = list(product_item.version_items.values()) + versions.sort() + last_version = versions[-1] + last_version_id_by_product_id[product_item.product_id] = ( + last_version.version_id + ) + + version_ids = set(last_version_id_by_product_id.values()) + repre_count_by_version_id = self._controller.get_versions_representation_count( + project_name, version_ids + ) + sync_availability_by_version_id = ( + self._controller.get_version_sync_availability( + project_name, version_ids + ) + ) # Prepare product groups product_name_matches_by_group = collections.defaultdict(dict) @@ -443,6 +486,8 @@ class ProductsModel(QtGui.QStandardItemModel): product_item, active_site_icon, remote_site_icon, + repre_count_by_version_id, + sync_availability_by_version_id, ) new_items.append(item) @@ -463,6 +508,8 @@ class ProductsModel(QtGui.QStandardItemModel): product_item, active_site_icon, remote_site_icon, + repre_count_by_version_id, + sync_availability_by_version_id, ) new_merged_items.append(item) merged_product_types.add(product_item.product_type) diff --git a/client/ayon_core/tools/loader/ui/products_widget.py b/client/ayon_core/tools/loader/ui/products_widget.py index 3025ec18bd..d9f027153e 100644 --- a/client/ayon_core/tools/loader/ui/products_widget.py +++ b/client/ayon_core/tools/loader/ui/products_widget.py @@ -139,9 +139,9 @@ class ProductsWidget(QtWidgets.QWidget): products_view.setItemDelegateForColumn( products_model.in_scene_col, in_scene_delegate) - site_sync_delegate = 
SiteSyncDelegate() + sitesync_delegate = SiteSyncDelegate() products_view.setItemDelegateForColumn( - products_model.site_sync_avail_col, site_sync_delegate) + products_model.sitesync_avail_col, sitesync_delegate) main_layout = QtWidgets.QHBoxLayout(self) main_layout.setContentsMargins(0, 0, 0, 0) @@ -176,7 +176,7 @@ class ProductsWidget(QtWidgets.QWidget): self._version_delegate = version_delegate self._time_delegate = time_delegate self._in_scene_delegate = in_scene_delegate - self._site_sync_delegate = site_sync_delegate + self._sitesync_delegate = sitesync_delegate self._selected_project_name = None self._selected_folder_ids = set() @@ -192,8 +192,8 @@ class ProductsWidget(QtWidgets.QWidget): products_model.in_scene_col, not controller.is_loaded_products_supported() ) - self._set_site_sync_visibility( - self._controller.is_site_sync_enabled() + self._set_sitesync_visibility( + self._controller.is_sitesync_enabled() ) def set_name_filter(self, name): @@ -229,10 +229,10 @@ class ProductsWidget(QtWidgets.QWidget): def refresh(self): self._refresh_model() - def _set_site_sync_visibility(self, site_sync_enabled): + def _set_sitesync_visibility(self, sitesync_enabled): self._products_view.setColumnHidden( - self._products_model.site_sync_avail_col, - not site_sync_enabled + self._products_model.sitesync_avail_col, + not sitesync_enabled ) def _fill_version_editor(self): @@ -395,10 +395,10 @@ class ProductsWidget(QtWidgets.QWidget): def _on_folders_selection_change(self, event): project_name = event["project_name"] - site_sync_enabled = self._controller.is_site_sync_enabled( + sitesync_enabled = self._controller.is_sitesync_enabled( project_name ) - self._set_site_sync_visibility(site_sync_enabled) + self._set_sitesync_visibility(sitesync_enabled) self._selected_project_name = project_name self._selected_folder_ids = event["folder_ids"] self._refresh_model() diff --git a/client/ayon_core/tools/loader/ui/repres_widget.py b/client/ayon_core/tools/loader/ui/repres_widget.py index 3b6b8f94bf..d19ad306a3 100644 --- a/client/ayon_core/tools/loader/ui/repres_widget.py +++ b/client/ayon_core/tools/loader/ui/repres_widget.py @@ -307,8 +307,8 @@ class RepresentationsWidget(QtWidgets.QWidget): self._repre_model = repre_model self._repre_proxy_model = repre_proxy_model - self._set_site_sync_visibility( - self._controller.is_site_sync_enabled() + self._set_sitesync_visibility( + self._controller.is_sitesync_enabled() ) self._set_multiple_folders_selected(False) @@ -320,19 +320,19 @@ class RepresentationsWidget(QtWidgets.QWidget): def _on_project_change(self, event): self._selected_project_name = event["project_name"] - site_sync_enabled = self._controller.is_site_sync_enabled( + sitesync_enabled = self._controller.is_sitesync_enabled( self._selected_project_name ) - self._set_site_sync_visibility(site_sync_enabled) + self._set_sitesync_visibility(sitesync_enabled) - def _set_site_sync_visibility(self, site_sync_enabled): + def _set_sitesync_visibility(self, sitesync_enabled): self._repre_view.setColumnHidden( self._repre_model.active_site_column, - not site_sync_enabled + not sitesync_enabled ) self._repre_view.setColumnHidden( self._repre_model.remote_site_column, - not site_sync_enabled + not sitesync_enabled ) def _set_multiple_folders_selected(self, selected_multiple_folders): diff --git a/client/ayon_core/tools/publisher/control_qt.py b/client/ayon_core/tools/publisher/control_qt.py index ee08899cac..bef3a5af3b 100644 --- a/client/ayon_core/tools/publisher/control_qt.py +++ 
b/client/ayon_core/tools/publisher/control_qt.py @@ -343,8 +343,9 @@ class QtRemotePublishController(BasePublisherController): @abstractmethod def _send_instance_changes_to_client(self): - instance_changes = self._get_instance_changes_for_client() - # Implement to send 'instance_changes' value to client + # TODO Implement to send 'instance_changes' value to client + # instance_changes = self._get_instance_changes_for_client() + pass @abstractmethod def save_changes(self): diff --git a/client/ayon_core/tools/sceneinventory/control.py b/client/ayon_core/tools/sceneinventory/control.py index 77f4d60b22..592113455c 100644 --- a/client/ayon_core/tools/sceneinventory/control.py +++ b/client/ayon_core/tools/sceneinventory/control.py @@ -28,7 +28,7 @@ class SceneInventoryController: self._current_folder_id = None self._current_folder_set = False - self._site_sync_model = SiteSyncModel(self) + self._sitesync_model = SiteSyncModel(self) # Switch dialog requirements self._hierarchy_model = HierarchyModel(self) self._event_system = self._create_event_system() @@ -47,7 +47,7 @@ class SceneInventoryController: self._current_folder_id = None self._current_folder_set = False - self._site_sync_model.reset() + self._sitesync_model.reset() self._hierarchy_model.reset() def get_current_context(self): @@ -89,22 +89,22 @@ class SceneInventoryController: return [] # Site Sync methods - def is_sync_server_enabled(self): - return self._site_sync_model.is_sync_server_enabled() + def is_sitesync_enabled(self): + return self._sitesync_model.is_sitesync_enabled() def get_sites_information(self): - return self._site_sync_model.get_sites_information() + return self._sitesync_model.get_sites_information() def get_site_provider_icons(self): - return self._site_sync_model.get_site_provider_icons() + return self._sitesync_model.get_site_provider_icons() def get_representations_site_progress(self, representation_ids): - return self._site_sync_model.get_representations_site_progress( + return self._sitesync_model.get_representations_site_progress( representation_ids ) def resync_representations(self, representation_ids, site_type): - return self._site_sync_model.resync_representations( + return self._sitesync_model.resync_representations( representation_ids, site_type ) diff --git a/client/ayon_core/tools/sceneinventory/models/__init__.py b/client/ayon_core/tools/sceneinventory/models/__init__.py index c861d3c1a0..f840a45aa8 100644 --- a/client/ayon_core/tools/sceneinventory/models/__init__.py +++ b/client/ayon_core/tools/sceneinventory/models/__init__.py @@ -1,4 +1,4 @@ -from .site_sync import SiteSyncModel +from .sitesync import SiteSyncModel __all__ = ( diff --git a/client/ayon_core/tools/sceneinventory/models/site_sync.py b/client/ayon_core/tools/sceneinventory/models/sitesync.py similarity index 75% rename from client/ayon_core/tools/sceneinventory/models/site_sync.py rename to client/ayon_core/tools/sceneinventory/models/sitesync.py index 7f09f2b25b..1a1f08bf02 100644 --- a/client/ayon_core/tools/sceneinventory/models/site_sync.py +++ b/client/ayon_core/tools/sceneinventory/models/sitesync.py @@ -9,30 +9,30 @@ class SiteSyncModel: def __init__(self, controller): self._controller = controller - self._sync_server_module = NOT_SET - self._sync_server_enabled = None + self._sitesync_addon = NOT_SET + self._sitesync_enabled = None self._active_site = NOT_SET self._remote_site = NOT_SET self._active_site_provider = NOT_SET self._remote_site_provider = NOT_SET def reset(self): - self._sync_server_module = NOT_SET - 
self._sync_server_enabled = None + self._sitesync_addon = NOT_SET + self._sitesync_enabled = None self._active_site = NOT_SET self._remote_site = NOT_SET self._active_site_provider = NOT_SET self._remote_site_provider = NOT_SET - def is_sync_server_enabled(self): + def is_sitesync_enabled(self): """Site sync is enabled. Returns: bool: Is enabled or not. """ - self._cache_sync_server_module() - return self._sync_server_enabled + self._cache_sitesync_addon() + return self._sitesync_enabled def get_site_provider_icons(self): """Icon paths per provider. @@ -41,10 +41,10 @@ class SiteSyncModel: dict[str, str]: Path by provider name. """ - if not self.is_sync_server_enabled(): + if not self.is_sitesync_enabled(): return {} - site_sync_addon = self._get_sync_server_module() - return site_sync_addon.get_site_icons() + sitesync_addon = self._get_sitesync_addon() + return sitesync_addon.get_site_icons() def get_sites_information(self): return { @@ -65,11 +65,11 @@ class SiteSyncModel: } for repre_id in representation_ids } - if not self.is_sync_server_enabled(): + if not self.is_sitesync_enabled(): return output project_name = self._controller.get_current_project_name() - site_sync = self._get_sync_server_module() + sitesync_addon = self._get_sitesync_addon() repre_entities = ayon_api.get_representations( project_name, representation_ids ) @@ -78,7 +78,7 @@ class SiteSyncModel: for repre_entity in repre_entities: repre_output = output[repre_entity["id"]] - result = site_sync.get_progress_for_repre( + result = sitesync_addon.get_progress_for_repre( repre_entity, active_site, remote_site ) repre_output["active_site"] = result[active_site] @@ -95,7 +95,7 @@ class SiteSyncModel: """ project_name = self._controller.get_current_project_name() - site_sync = self._get_sync_server_module() + sitesync_addon = self._get_sitesync_addon() active_site = self._get_active_site() remote_site = self._get_remote_site() progress = self.get_representations_site_progress( @@ -115,22 +115,22 @@ class SiteSyncModel: site = remote_site if check_progress == 1: - site_sync.add_site( + sitesync_addon.add_site( project_name, repre_id, site, force=True ) - def _get_sync_server_module(self): - self._cache_sync_server_module() - return self._sync_server_module + def _get_sitesync_addon(self): + self._cache_sitesync_addon() + return self._sitesync_addon - def _cache_sync_server_module(self): - if self._sync_server_module is not NOT_SET: - return self._sync_server_module + def _cache_sitesync_addon(self): + if self._sitesync_addon is not NOT_SET: + return self._sitesync_addon manager = AddonsManager() - site_sync = manager.get("sync_server") - sync_enabled = site_sync is not None and site_sync.enabled - self._sync_server_module = site_sync - self._sync_server_enabled = sync_enabled + sitesync_addon = manager.get("sitesync") + sync_enabled = sitesync_addon is not None and sitesync_addon.enabled + self._sitesync_addon = sitesync_addon + self._sitesync_enabled = sync_enabled def _get_active_site(self): if self._active_site is NOT_SET: @@ -157,19 +157,19 @@ class SiteSyncModel: remote_site = None active_site_provider = None remote_site_provider = None - if self.is_sync_server_enabled(): - site_sync = self._get_sync_server_module() + if self.is_sitesync_enabled(): + sitesync_addon = self._get_sitesync_addon() project_name = self._controller.get_current_project_name() - active_site = site_sync.get_active_site(project_name) - remote_site = site_sync.get_remote_site(project_name) + active_site = sitesync_addon.get_active_site(project_name) 
+ remote_site = sitesync_addon.get_remote_site(project_name) active_site_provider = "studio" remote_site_provider = "studio" if active_site != "studio": - active_site_provider = site_sync.get_provider_for_site( + active_site_provider = sitesync_addon.get_provider_for_site( project_name, active_site ) if remote_site != "studio": - remote_site_provider = site_sync.get_provider_for_site( + remote_site_provider = sitesync_addon.get_provider_for_site( project_name, remote_site ) diff --git a/client/ayon_core/tools/sceneinventory/view.py b/client/ayon_core/tools/sceneinventory/view.py index d576bdc139..5cbd4daf70 100644 --- a/client/ayon_core/tools/sceneinventory/view.py +++ b/client/ayon_core/tools/sceneinventory/view.py @@ -311,9 +311,9 @@ class SceneInventoryView(QtWidgets.QTreeView): menu.addAction(remove_action) - self._handle_sync_server(menu, repre_ids) + self._handle_sitesync(menu, repre_ids) - def _handle_sync_server(self, menu, repre_ids): + def _handle_sitesync(self, menu, repre_ids): """Adds actions for download/upload when SyncServer is enabled Args: @@ -324,7 +324,7 @@ class SceneInventoryView(QtWidgets.QTreeView): (OptionMenu) """ - if not self._controller.is_sync_server_enabled(): + if not self._controller.is_sitesync_enabled(): return menu.addSeparator() diff --git a/client/ayon_core/tools/sceneinventory/window.py b/client/ayon_core/tools/sceneinventory/window.py index 9584524edd..555db3a17c 100644 --- a/client/ayon_core/tools/sceneinventory/window.py +++ b/client/ayon_core/tools/sceneinventory/window.py @@ -70,7 +70,7 @@ class SceneInventoryWindow(QtWidgets.QDialog): view = SceneInventoryView(controller, self) view.setModel(proxy) - sync_enabled = controller.is_sync_server_enabled() + sync_enabled = controller.is_sitesync_enabled() view.setColumnHidden(model.active_site_col, not sync_enabled) view.setColumnHidden(model.remote_site_col, not sync_enabled) diff --git a/client/ayon_core/tools/tray/tray.py b/client/ayon_core/tools/tray/tray.py index 3c6c529be8..957518afe4 100644 --- a/client/ayon_core/tools/tray/tray.py +++ b/client/ayon_core/tools/tray/tray.py @@ -552,7 +552,7 @@ class TrayStarter(QtCore.QObject): def main(): app = get_ayon_qt_app() - starter = TrayStarter(app) + starter = TrayStarter(app) # noqa F841 if not is_running_from_build() and os.name == "nt": import ctypes diff --git a/client/ayon_core/tools/utils/color_widgets/color_inputs.py b/client/ayon_core/tools/utils/color_widgets/color_inputs.py index 9c8e7b92e8..795b80fc1e 100644 --- a/client/ayon_core/tools/utils/color_widgets/color_inputs.py +++ b/client/ayon_core/tools/utils/color_widgets/color_inputs.py @@ -562,11 +562,11 @@ class HSLInputs(QtWidgets.QWidget): return self._block_changes = True - h, s, l, _ = self.color.getHsl() + hue, sat, lum, _ = self.color.getHsl() - self.input_hue.setValue(h) - self.input_sat.setValue(s) - self.input_light.setValue(l) + self.input_hue.setValue(hue) + self.input_sat.setValue(sat) + self.input_light.setValue(lum) self._block_changes = False diff --git a/client/ayon_core/tools/utils/widgets.py b/client/ayon_core/tools/utils/widgets.py index 1d4f85246f..21cab5d682 100644 --- a/client/ayon_core/tools/utils/widgets.py +++ b/client/ayon_core/tools/utils/widgets.py @@ -578,7 +578,8 @@ class OptionalAction(QtWidgets.QWidgetAction): def set_option_tip(self, options): sep = "\n\n" if not options or not isinstance(options[0], AbstractAttrDef): - mak = (lambda opt: opt["name"] + " :\n " + opt["help"]) + def mak(opt): + return opt["name"] + " :\n " + opt["help"] self.option_tip = 
sep.join(mak(opt) for opt in options) return diff --git a/client/ayon_core/tools/workfiles/control.py b/client/ayon_core/tools/workfiles/control.py index 7fa7af1662..3048e6be94 100644 --- a/client/ayon_core/tools/workfiles/control.py +++ b/client/ayon_core/tools/workfiles/control.py @@ -659,16 +659,7 @@ class BaseWorkfileController( folder_id != self.get_current_folder_id() or task_name != self.get_current_task_name() ): - folder_entity = ayon_api.get_folder_by_id( - event_data["project_name"], - event_data["folder_id"], - ) - task_entity = ayon_api.get_task_by_name( - event_data["project_name"], - event_data["folder_id"], - event_data["task_name"] - ) - change_current_context(folder_entity, task_entity) + self._change_current_context(project_name, folder_id, task_id) self._host_open_workfile(filepath) @@ -710,16 +701,8 @@ class BaseWorkfileController( folder_id != self.get_current_folder_id() or task_name != self.get_current_task_name() ): - folder_entity = ayon_api.get_folder_by_id( - project_name, folder["id"] - ) - task_entity = ayon_api.get_task_by_name( - project_name, folder["id"], task_name - ) - change_current_context( - folder_entity, - task_entity, - template_key=template_key + self._change_current_context( + project_name, folder_id, task_id, template_key ) # Save workfile @@ -744,4 +727,18 @@ class BaseWorkfileController( # Trigger after save events emit_event("workfile.save.after", event_data, source="workfiles.tool") - self.reset() + + def _change_current_context( + self, project_name, folder_id, task_id, template_key=None + ): + # Change current context + folder_entity = self.get_folder_entity(project_name, folder_id) + task_entity = self.get_task_entity(project_name, task_id) + change_current_context( + folder_entity, + task_entity, + template_key=template_key + ) + self._current_folder_id = folder_entity["id"] + self._current_folder_path = folder_entity["path"] + self._current_task_name = task_entity["name"] diff --git a/client/ayon_core/version.py b/client/ayon_core/version.py index f3ad9713d5..a60de0493a 100644 --- a/client/ayon_core/version.py +++ b/client/ayon_core/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring AYON core addon version.""" -__version__ = "0.3.0-dev.1" +__version__ = "0.3.1-dev.1" diff --git a/client/ayon_core/widgets/__init__.py b/client/ayon_core/widgets/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/client/ayon_core/widgets/password_dialog.py b/client/ayon_core/widgets/password_dialog.py deleted file mode 100644 index a4c50128ff..0000000000 --- a/client/ayon_core/widgets/password_dialog.py +++ /dev/null @@ -1,33 +0,0 @@ -# TODO remove - kept for kitsu addon which imported it -from qtpy import QtWidgets, QtCore, QtGui - - -class PressHoverButton(QtWidgets.QPushButton): - """ - Deprecated: - Use `openpype.tools.utils.PressHoverButton` instead. 
- """ - _mouse_pressed = False - _mouse_hovered = False - change_state = QtCore.Signal(bool) - - def mousePressEvent(self, event): - self._mouse_pressed = True - self._mouse_hovered = True - self.change_state.emit(self._mouse_hovered) - super(PressHoverButton, self).mousePressEvent(event) - - def mouseReleaseEvent(self, event): - self._mouse_pressed = False - self._mouse_hovered = False - self.change_state.emit(self._mouse_hovered) - super(PressHoverButton, self).mouseReleaseEvent(event) - - def mouseMoveEvent(self, event): - mouse_pos = self.mapFromGlobal(QtGui.QCursor.pos()) - under_mouse = self.rect().contains(mouse_pos) - if under_mouse != self._mouse_hovered: - self._mouse_hovered = under_mouse - self.change_state.emit(self._mouse_hovered) - - super(PressHoverButton, self).mouseMoveEvent(event) diff --git a/client/pyproject.toml b/client/pyproject.toml index 7b4329a31a..1a0ad7e5f2 100644 --- a/client/pyproject.toml +++ b/client/pyproject.toml @@ -4,19 +4,19 @@ description="AYON core addon." [tool.poetry.dependencies] python = ">=3.9.1,<3.10" -aiohttp_json_rpc = "*" # TVPaint server -aiohttp-middlewares = "^2.0.0" -wsrpc_aiohttp = "^3.1.1" # websocket server -Click = "^8" clique = "1.6.*" jsonschema = "^2.6.0" pyblish-base = "^1.8.11" -pynput = "^1.7.2" # Timers manager - TODO remove speedcopy = "^2.1" six = "^1.15" qtawesome = "0.7.3" [ayon.runtimeDependencies] +aiohttp_json_rpc = "*" # TVPaint server +aiohttp-middlewares = "^2.0.0" +wsrpc_aiohttp = "^3.1.1" # websocket server +Click = "^8" OpenTimelineIO = "0.14.1" opencolorio = "2.2.1" Pillow = "9.5.0" +pynput = "^1.7.2" # Timers manager - TODO remove diff --git a/package.py b/package.py index 470bbf256b..79450d029f 100644 --- a/package.py +++ b/package.py @@ -1,6 +1,6 @@ name = "core" title = "Core" -version = "0.3.0-dev.1" +version = "0.3.1-dev.1" client_dir = "ayon_core" diff --git a/pyproject.toml b/pyproject.toml index ee124ddc2d..dc8b312364 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ [tool.poetry] name = "ayon-core" -version = "0.3.0" +version = "0.3.1" description = "" authors = ["Ynput Team "] readme = "README.md" @@ -92,7 +92,8 @@ line-ending = "auto" [tool.codespell] # Ignore words that are not in the dictionary. 
-ignore-words-list = "ayon,ynput" +ignore-words-list = "ayon,ynput,parms,parm,hda,developpement" + skip = "./.*,./package/*,*/vendor/*,*/unreal/integration/*,*/aftereffects/api/extension/js/libs/*" count = true quiet-level = 3 diff --git a/server/settings/tools.py b/server/settings/tools.py index 488d27e8f1..fb8430a71c 100644 --- a/server/settings/tools.py +++ b/server/settings/tools.py @@ -173,6 +173,7 @@ def _product_types_enum(): "rig", "setdress", "take", + "usd", "usdShade", "vdbcache", "vrayproxy", diff --git a/server_addon/applications/client/ayon_applications/__init__.py b/server_addon/applications/client/ayon_applications/__init__.py new file mode 100644 index 0000000000..c9b72f9914 --- /dev/null +++ b/server_addon/applications/client/ayon_applications/__init__.py @@ -0,0 +1,59 @@ +from .constants import ( + APPLICATIONS_ADDON_ROOT, + DEFAULT_ENV_SUBGROUP, + PLATFORM_NAMES, +) +from .exceptions import ( + ApplicationNotFound, + ApplicationExecutableNotFound, + ApplicationLaunchFailed, + MissingRequiredKey, +) +from .defs import ( + LaunchTypes, + ApplicationExecutable, + UndefinedApplicationExecutable, + ApplicationGroup, + Application, + EnvironmentToolGroup, + EnvironmentTool, +) +from .hooks import ( + LaunchHook, + PreLaunchHook, + PostLaunchHook, +) +from .manager import ( + ApplicationManager, + ApplicationLaunchContext, +) +from .addon import ApplicationsAddon + + +__all__ = ( + "APPLICATIONS_ADDON_ROOT", + "DEFAULT_ENV_SUBGROUP", + "PLATFORM_NAMES", + + "ApplicationNotFound", + "ApplicationExecutableNotFound", + "ApplicationLaunchFailed", + "MissingRequiredKey", + + "LaunchTypes", + "ApplicationExecutable", + "UndefinedApplicationExecutable", + "ApplicationGroup", + "Application", + "EnvironmentToolGroup", + "EnvironmentTool", + + "LaunchHook", + "PreLaunchHook", + "PostLaunchHook", + + "ApplicationManager", + "ApplicationLaunchContext", + + "ApplicationsAddon", +) diff --git a/server_addon/applications/client/ayon_applications/addon.py b/server_addon/applications/client/ayon_applications/addon.py new file mode 100644 index 0000000000..0f1b68af0e --- /dev/null +++ b/server_addon/applications/client/ayon_applications/addon.py @@ -0,0 +1,173 @@ +import os +import json + +from ayon_core.addon import AYONAddon, IPluginPaths, click_wrap + +from .constants import APPLICATIONS_ADDON_ROOT +from .defs import LaunchTypes +from .manager import ApplicationManager + + +class ApplicationsAddon(AYONAddon, IPluginPaths): + name = "applications" + + def initialize(self, settings): + # TODO remove when addon is removed from ayon-core + self.enabled = self.name in settings + + def get_app_environments_for_context( + self, + project_name, + folder_path, + task_name, + full_app_name, + env_group=None, + launch_type=None, + env=None, + ): + """Calculate environment variables for launch context. + + Args: + project_name (str): Project name. + folder_path (str): Folder path. + task_name (str): Task name. + full_app_name (str): Full application name. + env_group (Optional[str]): Environment group. + launch_type (Optional[str]): Launch type. + env (Optional[dict[str, str]]): Environment variables to update. + + Returns: + dict[str, str]: Environment variables for context. 
+ + """ + from ayon_applications.utils import get_app_environments_for_context + + if not full_app_name: + return {} + + return get_app_environments_for_context( + project_name, + folder_path, + task_name, + full_app_name, + env_group=env_group, + launch_type=launch_type, + env=env, + addons_manager=self.manager + ) + + def get_farm_publish_environment_variables( + self, + project_name, + folder_path, + task_name, + full_app_name=None, + env_group=None, + ): + """Calculate environment variables for farm publish. + + Args: + project_name (str): Project name. + folder_path (str): Folder path. + task_name (str): Task name. + env_group (Optional[str]): Environment group. + full_app_name (Optional[str]): Full application name. Value from + environment variable 'AYON_APP_NAME' is used if 'None' is + passed. + + Returns: + dict[str, str]: Environment variables for farm publish. + + """ + if full_app_name is None: + full_app_name = os.getenv("AYON_APP_NAME") + + return self.get_app_environments_for_context( + project_name, + folder_path, + task_name, + full_app_name, + env_group=env_group, + launch_type=LaunchTypes.farm_publish + ) + + def get_applications_manager(self, settings=None): + """Get applications manager. + + Args: + settings (Optional[dict]): Studio/project settings. + + Returns: + ApplicationManager: Applications manager. + + """ + return ApplicationManager(settings) + + def get_plugin_paths(self): + return { + "publish": [ + os.path.join(APPLICATIONS_ADDON_ROOT, "plugins", "publish") + ] + } + + # --- CLI --- + def cli(self, addon_click_group): + main_group = click_wrap.group( + self._cli_main, name=self.name, help="Applications addon" + ) + ( + main_group.command( + self._cli_extract_environments, + name="extractenvironments", + help=( + "Extract environment variables for context into json file" + ) + ) + .argument("output_json_path") + .option("--project", help="Project name", default=None) + .option("--folder", help="Folder path", default=None) + .option("--task", help="Task name", default=None) + .option("--app", help="Application name", default=None) + .option( + "--envgroup", + help="Environment group (e.g. \"farm\")", + default=None + ) + ) + # Convert main command to click object and add it to parent group + addon_click_group.add_command( + main_group.to_click_obj() + ) + + def _cli_main(self): + pass + + def _cli_extract_environments( + self, output_json_path, project, folder, task, app, envgroup + ): + """Produces json file with environment based on project and app. + + Called by farm integration to propagate environment into farm jobs. + + Args: + output_json_path (str): Output json file path. + project (str): Project name. + folder (str): Folder path. + task (str): Task name. + app (str): Full application name e.g. 'maya/2024'. + envgroup (str): Environment group. 
+ + """ + if all((project, folder, task, app)): + env = self.get_farm_publish_environment_variables( + project, folder, task, app, env_group=envgroup, + ) + else: + env = os.environ.copy() + + output_dir = os.path.dirname(output_json_path) + if not os.path.exists(output_dir): + os.makedirs(output_dir) + + with open(output_json_path, "w") as file_stream: + json.dump(env, file_stream, indent=4) diff --git a/server_addon/applications/client/ayon_applications/constants.py b/server_addon/applications/client/ayon_applications/constants.py new file mode 100644 index 0000000000..92c8f4f254 --- /dev/null +++ b/server_addon/applications/client/ayon_applications/constants.py @@ -0,0 +1,6 @@ +import os + +APPLICATIONS_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__)) + +PLATFORM_NAMES = {"windows", "linux", "darwin"} +DEFAULT_ENV_SUBGROUP = "standard" diff --git a/server_addon/applications/client/ayon_applications/defs.py b/server_addon/applications/client/ayon_applications/defs.py new file mode 100644 index 0000000000..5cc36041a1 --- /dev/null +++ b/server_addon/applications/client/ayon_applications/defs.py @@ -0,0 +1,404 @@ +import os +import platform +import json +import copy + +from ayon_core.lib import find_executable + + +class LaunchTypes: + """Launch types are filters for pre/post-launch hooks. + + Please use these variables in case they'll change values. + """ + + # Local launch - application is launched on local machine + local = "local" + # Farm render job - application is on farm + farm_render = "farm-render" + # Farm publish job - integration post-render job + farm_publish = "farm-publish" + # Remote launch - application is launched on remote machine from which + # can be started publishing + remote = "remote" + # Automated launch - application is launched with automated publishing + automated = "automated" + + +class ApplicationExecutable: + """Representation of executable loaded from settings.""" + + def __init__(self, executable): + # Try to format executable with environments + try: + executable = executable.format(**os.environ) + except Exception: + pass + + # On MacOS check if exists path to executable when ends with `.app` + # - it is common that path will lead to "/Applications/Blender" but + # real path is "/Applications/Blender.app" + if platform.system().lower() == "darwin": + executable = self.macos_executable_prep(executable) + + self.executable_path = executable + + def __str__(self): + return self.executable_path + + def __repr__(self): + return "<{}> {}".format(self.__class__.__name__, self.executable_path) + + @staticmethod + def macos_executable_prep(executable): + """Try to find full path to executable file. + + Real executable is stored in '*.app/Contents/MacOS/'. + + Having path to '*.app' gives ability to read it's plist info and + use "CFBundleExecutable" key from plist to know what is "executable." + + Plist is stored in '*.app/Contents/Info.plist'. + + This is because some '*.app' directories don't have same permissions + as real executable. 
+ """ + # Try to find if there is `.app` file + if not os.path.exists(executable): + _executable = executable + ".app" + if os.path.exists(_executable): + executable = _executable + + # Try to find real executable if executable has `Contents` subfolder + contents_dir = os.path.join(executable, "Contents") + if os.path.exists(contents_dir): + executable_filename = None + # Load plist file and check for bundle executable + plist_filepath = os.path.join(contents_dir, "Info.plist") + if os.path.exists(plist_filepath): + import plistlib + + if hasattr(plistlib, "load"): + with open(plist_filepath, "rb") as stream: + parsed_plist = plistlib.load(stream) + else: + parsed_plist = plistlib.readPlist(plist_filepath) + executable_filename = parsed_plist.get("CFBundleExecutable") + + if executable_filename: + executable = os.path.join( + contents_dir, "MacOS", executable_filename + ) + + return executable + + def as_args(self): + return [self.executable_path] + + def _realpath(self): + """Check if path is valid executable path.""" + # Check for executable in PATH + result = find_executable(self.executable_path) + if result is not None: + return result + + # This is not 100% validation but it is better than remove ability to + # launch .bat, .sh or extentionless files + if os.path.exists(self.executable_path): + return self.executable_path + return None + + def exists(self): + if not self.executable_path: + return False + return bool(self._realpath()) + + +class UndefinedApplicationExecutable(ApplicationExecutable): + """Some applications do not require executable path from settings. + + In that case this class is used to "fake" existing executable. + """ + def __init__(self): + pass + + def __str__(self): + return self.__class__.__name__ + + def __repr__(self): + return "<{}>".format(self.__class__.__name__) + + def as_args(self): + return [] + + def exists(self): + return True + + +class ApplicationGroup: + """Hold information about application group. + + Application group wraps different versions(variants) of application. + e.g. "maya" is group and "maya_2020" is variant. + + Group hold `host_name` which is implementation name used in AYON. Also + holds `enabled` if whole app group is enabled or `icon` for application + icon path in resources. + + Group has also `environment` which hold same environments for all variants. + + Args: + name (str): Groups' name. + data (dict): Group defying data loaded from settings. + manager (ApplicationManager): Manager that created the group. + """ + + def __init__(self, name, data, manager): + self.name = name + self.manager = manager + self._data = data + + self.enabled = data["enabled"] + self.label = data["label"] or None + self.icon = data["icon"] or None + env = {} + try: + env = json.loads(data["environment"]) + except Exception: + pass + self._environment = env + + host_name = data["host_name"] or None + self.is_host = host_name is not None + self.host_name = host_name + + settings_variants = data["variants"] + variants = {} + for variant_data in settings_variants: + app_variant = Application(variant_data, self) + variants[app_variant.name] = app_variant + + self.variants = variants + + def __repr__(self): + return "<{}> - {}".format(self.__class__.__name__, self.name) + + def __iter__(self): + for variant in self.variants.values(): + yield variant + + @property + def environment(self): + return copy.deepcopy(self._environment) + + +class Application: + """Hold information about application. + + Object by itself does nothing special. 
+ + Args: + data (dict): Data for the version containing information about + executables, variant label or if is enabled. + Only required key is `executables`. + group (ApplicationGroup): App group object that created the application + and under which application belongs. + + """ + def __init__(self, data, group): + self._data = data + name = data["name"] + label = data["label"] or name + enabled = False + if group.enabled: + enabled = data.get("enabled", True) + + if group.label: + full_label = " ".join((group.label, label)) + else: + full_label = label + env = {} + try: + env = json.loads(data["environment"]) + except Exception: + pass + + arguments = data["arguments"] + if isinstance(arguments, dict): + arguments = arguments.get(platform.system().lower()) + + if not arguments: + arguments = [] + + _executables = data["executables"].get(platform.system().lower(), []) + executables = [ + ApplicationExecutable(executable) + for executable in _executables + ] + + self.group = group + + self.name = name + self.label = label + self.enabled = enabled + self.use_python_2 = data.get("use_python_2", False) + + self.full_name = "/".join((group.name, name)) + self.full_label = full_label + self.arguments = arguments + self.executables = executables + self._environment = env + + def __repr__(self): + return "<{}> - {}".format(self.__class__.__name__, self.full_name) + + @property + def environment(self): + return copy.deepcopy(self._environment) + + @property + def manager(self): + return self.group.manager + + @property + def host_name(self): + return self.group.host_name + + @property + def icon(self): + return self.group.icon + + @property + def is_host(self): + return self.group.is_host + + def find_executable(self): + """Try to find existing executable for application. + + Returns (str): Path to executable from `executables` or None if any + exists. + """ + for executable in self.executables: + if executable.exists(): + return executable + return None + + def launch(self, *args, **kwargs): + """Launch the application. + + For this purpose is used manager's launch method to keep logic at one + place. + + Arguments must match with manager's launch method. That's why *args + **kwargs are used. + + Returns: + subprocess.Popen: Return executed process as Popen object. + """ + return self.manager.launch(self.full_name, *args, **kwargs) + + +class EnvironmentToolGroup: + """Hold information about environment tool group. + + Environment tool group may hold different variants of same tool and set + environments that are same for all of them. + + e.g. "mtoa" may have different versions but all environments except one + are same. + + Args: + data (dict): Group information with variants. + manager (ApplicationManager): Manager that creates the group. 
+ """ + + def __init__(self, data, manager): + name = data["name"] + label = data["label"] + + self.name = name + self.label = label + self._data = data + self.manager = manager + + environment = {} + try: + environment = json.loads(data["environment"]) + except Exception: + pass + self._environment = environment + + variants = data.get("variants") or [] + variants_by_name = {} + for variant_data in variants: + tool = EnvironmentTool(variant_data, self) + variants_by_name[tool.name] = tool + self.variants = variants_by_name + + def __repr__(self): + return "<{}> - {}".format(self.__class__.__name__, self.name) + + def __iter__(self): + for variant in self.variants.values(): + yield variant + + @property + def environment(self): + return copy.deepcopy(self._environment) + + +class EnvironmentTool: + """Hold information about application tool. + + Structure of tool information. + + Args: + variant_data (dict): Variant data with environments and + host and app variant filters. + group (EnvironmentToolGroup): Name of group which wraps tool. + """ + + def __init__(self, variant_data, group): + # Backwards compatibility 3.9.1 - 3.9.2 + # - 'variant_data' contained only environments but contain also host + # and application variant filters + name = variant_data["name"] + label = variant_data["label"] + host_names = variant_data["host_names"] + app_variants = variant_data["app_variants"] + + environment = {} + try: + environment = json.loads(variant_data["environment"]) + except Exception: + pass + + self.host_names = host_names + self.app_variants = app_variants + self.name = name + self.variant_label = label + self.label = " ".join((group.label, label)) + self.group = group + + self._environment = environment + self.full_name = "/".join((group.name, name)) + + def __repr__(self): + return "<{}> - {}".format(self.__class__.__name__, self.full_name) + + @property + def environment(self): + return copy.deepcopy(self._environment) + + def is_valid_for_app(self, app): + """Is tool valid for application. + + Args: + app (Application): Application for which are prepared environments. + """ + if self.app_variants and app.full_name not in self.app_variants: + return False + + if self.host_names and app.host_name not in self.host_names: + return False + return True diff --git a/server_addon/applications/client/ayon_applications/exceptions.py b/server_addon/applications/client/ayon_applications/exceptions.py new file mode 100644 index 0000000000..d5a48d3b6b --- /dev/null +++ b/server_addon/applications/client/ayon_applications/exceptions.py @@ -0,0 +1,50 @@ +class ApplicationNotFound(Exception): + """Application was not found in ApplicationManager by name.""" + + def __init__(self, app_name): + self.app_name = app_name + super(ApplicationNotFound, self).__init__( + "Application \"{}\" was not found.".format(app_name) + ) + + +class ApplicationExecutableNotFound(Exception): + """Defined executable paths are not available on the machine.""" + + def __init__(self, application): + self.application = application + details = None + if not application.executables: + msg = ( + "Executable paths for application \"{}\"({}) are not set." + ) + else: + msg = ( + "Defined executable paths for application \"{}\"({})" + " are not available on this machine." 
+ ) + details = "Defined paths:" + for executable in application.executables: + details += "\n- " + executable.executable_path + + self.msg = msg.format(application.full_label, application.full_name) + self.details = details + + exc_msg = str(self.msg) + if details: + # Is it a good idea to pass a newline character to the exception message? + exc_msg += "\n" + details + self.exc_msg = exc_msg + super(ApplicationExecutableNotFound, self).__init__(exc_msg) + + +class ApplicationLaunchFailed(Exception): + """Application launch failed due to a known reason. + + Message should be self-explanatory as the traceback won't be shown. + """ + pass + + +class MissingRequiredKey(KeyError): + pass diff --git a/server_addon/applications/client/ayon_applications/hooks.py b/server_addon/applications/client/ayon_applications/hooks.py new file mode 100644 index 0000000000..6aa12a210a --- /dev/null +++ b/server_addon/applications/client/ayon_applications/hooks.py @@ -0,0 +1,150 @@ +import platform +from abc import ABCMeta, abstractmethod + +import six + +from ayon_core.lib import Logger + +from .defs import LaunchTypes + + +@six.add_metaclass(ABCMeta) +class LaunchHook: + """Abstract base class of launch hook.""" + # Order of prelaunch hook, will be executed last if set to None. + order = None + # Set of host implementations, skipped if empty. + hosts = set() + # Set of application groups + app_groups = set() + # Set of specific application names + app_names = set() + # Set of platform availability + platforms = set() + # Set of launch types for which the hook is available + # - if empty then the hook is available for all launch types + # - by default contains 'local' which is the most common case for launch hooks + launch_types = {LaunchTypes.local} + + def __init__(self, launch_context): + """Constructor of launch hook. + + Should always be called. + """ + self.log = Logger.get_logger(self.__class__.__name__) + + self.launch_context = launch_context + + is_valid = self.class_validation(launch_context) + if is_valid: + is_valid = self.validate() + + self.is_valid = is_valid + + @classmethod + def class_validation(cls, launch_context): + """Validation of class attributes by launch context. + + Args: + launch_context (ApplicationLaunchContext): Context of launching + application. + + Returns: + bool: Whether the launch hook is valid for the context based on class attributes.
+ """ + if cls.platforms: + low_platforms = tuple( + _platform.lower() + for _platform in cls.platforms + ) + if platform.system().lower() not in low_platforms: + return False + + if cls.hosts: + if launch_context.host_name not in cls.hosts: + return False + + if cls.app_groups: + if launch_context.app_group.name not in cls.app_groups: + return False + + if cls.app_names: + if launch_context.app_name not in cls.app_names: + return False + + if cls.launch_types: + if launch_context.launch_type not in cls.launch_types: + return False + + return True + + @property + def data(self): + return self.launch_context.data + + @property + def application(self): + return getattr(self.launch_context, "application", None) + + @property + def manager(self): + return getattr(self.application, "manager", None) + + @property + def host_name(self): + return getattr(self.application, "host_name", None) + + @property + def app_group(self): + return getattr(self.application, "group", None) + + @property + def app_name(self): + return getattr(self.application, "full_name", None) + + @property + def addons_manager(self): + return getattr(self.launch_context, "addons_manager", None) + + @property + def modules_manager(self): + """ + Deprecated: + Use 'addons_wrapper' instead. + """ + return self.addons_manager + + def validate(self): + """Optional validation of launch hook on initialization. + + Returns: + bool: Hook is valid (True) or invalid (False). + """ + # QUESTION Not sure if this method has any usable potential. + # - maybe result can be based on settings + return True + + @abstractmethod + def execute(self, *args, **kwargs): + """Abstract execute method where logic of hook is.""" + pass + + +class PreLaunchHook(LaunchHook): + """Abstract class of prelaunch hook. + + This launch hook will be processed before application is launched. + + If any exception will happen during processing the application won't be + launched. + """ + + +class PostLaunchHook(LaunchHook): + """Abstract class of postlaunch hook. + + This launch hook will be processed after application is launched. + + Nothing will happen if any exception will happen during processing. And + processing of other postlaunch hooks won't stop either. + """ diff --git a/server_addon/applications/client/ayon_applications/manager.py b/server_addon/applications/client/ayon_applications/manager.py new file mode 100644 index 0000000000..dca2ff4491 --- /dev/null +++ b/server_addon/applications/client/ayon_applications/manager.py @@ -0,0 +1,676 @@ +import os +import sys +import copy +import json +import tempfile +import platform +import inspect +import subprocess + +import six + +from ayon_core import AYON_CORE_ROOT +from ayon_core.settings import get_studio_settings +from ayon_core.lib import ( + Logger, + modules_from_path, + classes_from_module, + get_linux_launcher_args, +) +from ayon_core.addon import AddonsManager + +from .constants import DEFAULT_ENV_SUBGROUP +from .exceptions import ( + ApplicationNotFound, + ApplicationExecutableNotFound, +) +from .hooks import PostLaunchHook, PreLaunchHook +from .defs import EnvironmentToolGroup, ApplicationGroup, LaunchTypes + + +class ApplicationManager: + """Load applications and tools and store them by their full name. + + Args: + studio_settings (dict): Preloaded studio settings. When passed manager + will always use these values. Gives ability to create manager + using different settings. 
+ """ + + def __init__(self, studio_settings=None): + self.log = Logger.get_logger(self.__class__.__name__) + + self.app_groups = {} + self.applications = {} + self.tool_groups = {} + self.tools = {} + + self._studio_settings = studio_settings + + self.refresh() + + def set_studio_settings(self, studio_settings): + """Ability to change init system settings. + + This will trigger refresh of manager. + """ + self._studio_settings = studio_settings + + self.refresh() + + def refresh(self): + """Refresh applications from settings.""" + self.app_groups.clear() + self.applications.clear() + self.tool_groups.clear() + self.tools.clear() + + if self._studio_settings is not None: + settings = copy.deepcopy(self._studio_settings) + else: + settings = get_studio_settings( + clear_metadata=False, exclude_locals=False + ) + + applications_addon_settings = settings["applications"] + + # Prepare known applications + app_defs = applications_addon_settings["applications"] + additional_apps = app_defs.pop("additional_apps") + for additional_app in additional_apps: + app_name = additional_app.pop("name") + if app_name in app_defs: + self.log.warning(( + "Additional application '{}' is already" + " in built-in applications." + ).format(app_name)) + app_defs[app_name] = additional_app + + for group_name, variant_defs in app_defs.items(): + group = ApplicationGroup(group_name, variant_defs, self) + self.app_groups[group_name] = group + for app in group: + self.applications[app.full_name] = app + + tools_definitions = applications_addon_settings["tool_groups"] + for tool_group_data in tools_definitions: + group = EnvironmentToolGroup(tool_group_data, self) + self.tool_groups[group.name] = group + for tool in group: + self.tools[tool.full_name] = tool + + def find_latest_available_variant_for_group(self, group_name): + group = self.app_groups.get(group_name) + if group is None or not group.enabled: + return None + + output = None + for _, variant in reversed(sorted(group.variants.items())): + executable = variant.find_executable() + if executable: + output = variant + break + return output + + def create_launch_context(self, app_name, **data): + """Prepare launch context for application. + + Args: + app_name (str): Name of application that should be launched. + **data (Any): Any additional data. Data may be used during + + Returns: + ApplicationLaunchContext: Launch context for application. + + Raises: + ApplicationNotFound: Application was not found by entered name. + """ + + app = self.applications.get(app_name) + if not app: + raise ApplicationNotFound(app_name) + + executable = app.find_executable() + + return ApplicationLaunchContext( + app, executable, **data + ) + + def launch_with_context(self, launch_context): + """Launch application using existing launch context. + + Args: + launch_context (ApplicationLaunchContext): Prepared launch + context. + """ + + if not launch_context.executable: + raise ApplicationExecutableNotFound(launch_context.application) + return launch_context.launch() + + def launch(self, app_name, **data): + """Launch procedure. + + For host application it's expected to contain "project_name", + "folder_path" and "task_name". + + Args: + app_name (str): Name of application that should be launched. + **data (dict): Any additional data. Data may be used during + preparation to store objects usable in multiple places. + + Raises: + ApplicationNotFound: Application was not found by entered + argument `app_name`. 
+ ApplicationExecutableNotFound: Executables in application definition + were not found on this machine. + ApplicationLaunchFailed: Something important for application launch + failed. Exception should contain explanation message, + traceback should not be needed. + """ + + context = self.create_launch_context(app_name, **data) + return self.launch_with_context(context) + + +class ApplicationLaunchContext: + """Context of launching application. + + Main purpose of context is to prepare launch arguments and keyword + arguments for new process. Most important part of keyword arguments + preparations are environment variables. + + During the whole process is possible to use `data` attribute to store + object usable in multiple places. + + Launch arguments are strings in list. It is possible to "chain" argument + when order of them matters. That is possible to do with adding list where + order is right and should not change. + NOTE: This is recommendation, not requirement. + e.g.: `["nuke.exe", "--NukeX"]` -> In this case any part of process may + insert argument between `nuke.exe` and `--NukeX`. To keep them together + it is better to wrap them in another list: `[["nuke.exe", "--NukeX"]]`. + + Notes: + It is possible to use launch context only to prepare environment + variables. In that case `executable` may be None and can be used + 'run_prelaunch_hooks' method to run prelaunch hooks which prepare + them. + + Args: + application (Application): Application definition. + executable (ApplicationExecutable): Object with path to executable. + env_group (Optional[str]): Environment variable group. If not set + 'DEFAULT_ENV_SUBGROUP' is used. + launch_type (Optional[str]): Launch type. If not set 'local' is used. + **data (dict): Any additional data. Data may be used during + preparation to store objects usable in multiple places. + """ + + def __init__( + self, + application, + executable, + env_group=None, + launch_type=None, + **data + ): + # Application object + self.application = application + + self.addons_manager = AddonsManager() + + # Logger + logger_name = "{}-{}".format(self.__class__.__name__, + self.application.full_name) + self.log = Logger.get_logger(logger_name) + + self.executable = executable + + if launch_type is None: + launch_type = LaunchTypes.local + self.launch_type = launch_type + + if env_group is None: + env_group = DEFAULT_ENV_SUBGROUP + + self.env_group = env_group + + self.data = dict(data) + + launch_args = [] + if executable is not None: + launch_args = executable.as_args() + # subprocess.Popen launch arguments (first argument in constructor) + self.launch_args = launch_args + self.launch_args.extend(application.arguments) + if self.data.get("app_args"): + self.launch_args.extend(self.data.pop("app_args")) + + # Handle launch environemtns + src_env = self.data.pop("env", None) + if src_env is not None and not isinstance(src_env, dict): + self.log.warning(( + "Passed `env` kwarg has invalid type: {}. Expected: `dict`." + " Using `os.environ` instead." 
+ ).format(str(type(src_env)))) + src_env = None + + if src_env is None: + src_env = os.environ + + ignored_env = {"QT_API", } + env = { + key: str(value) + for key, value in src_env.items() + if key not in ignored_env + } + # subprocess.Popen keyword arguments + self.kwargs = {"env": env} + + if platform.system().lower() == "windows": + # Detach new process from currently running process on Windows + flags = ( + subprocess.CREATE_NEW_PROCESS_GROUP + | subprocess.DETACHED_PROCESS + ) + self.kwargs["creationflags"] = flags + + if not sys.stdout: + self.kwargs["stdout"] = subprocess.DEVNULL + self.kwargs["stderr"] = subprocess.DEVNULL + + self.prelaunch_hooks = None + self.postlaunch_hooks = None + + self.process = None + self._prelaunch_hooks_executed = False + + @property + def env(self): + if ( + "env" not in self.kwargs + or self.kwargs["env"] is None + ): + self.kwargs["env"] = {} + return self.kwargs["env"] + + @env.setter + def env(self, value): + if not isinstance(value, dict): + raise ValueError( + "'env' attribute expect 'dict' object. Got: {}".format( + str(type(value)) + ) + ) + self.kwargs["env"] = value + + @property + def modules_manager(self): + """ + Deprecated: + Use 'addons_manager' instead. + + """ + return self.addons_manager + + def _collect_addons_launch_hook_paths(self): + """Helper to collect application launch hooks from addons. + + Module have to have implemented 'get_launch_hook_paths' method which + can expect application as argument or nothing. + + Returns: + List[str]: Paths to launch hook directories. + """ + + expected_types = (list, tuple, set) + + output = [] + for module in self.addons_manager.get_enabled_addons(): + # Skip module if does not have implemented 'get_launch_hook_paths' + func = getattr(module, "get_launch_hook_paths", None) + if func is None: + continue + + func = module.get_launch_hook_paths + if hasattr(inspect, "signature"): + sig = inspect.signature(func) + expect_args = len(sig.parameters) > 0 + else: + expect_args = len(inspect.getargspec(func)[0]) > 0 + + # Pass application argument if method expect it. + try: + if expect_args: + hook_paths = func(self.application) + else: + hook_paths = func() + except Exception: + self.log.warning( + "Failed to call 'get_launch_hook_paths'", + exc_info=True + ) + continue + + if not hook_paths: + continue + + # Convert string to list + if isinstance(hook_paths, six.string_types): + hook_paths = [hook_paths] + + # Skip invalid types + if not isinstance(hook_paths, expected_types): + self.log.warning(( + "Result of `get_launch_hook_paths`" + " has invalid type {}. Expected {}" + ).format(type(hook_paths), expected_types)) + continue + + output.extend(hook_paths) + return output + + def paths_to_launch_hooks(self): + """Directory paths where to look for launch hooks.""" + # This method has potential to be part of application manager (maybe). 
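For reference, the per-addon side of the protocol consumed by `_collect_addons_launch_hook_paths` above could look like the following. The addon class, its name and the directory layout are hypothetical, and the usual `AYONAddon` base class from `ayon_core.addon` is assumed; only the optional `application` argument handling is taken from the code.

```python
import os

from ayon_core.addon import AYONAddon


class ExampleAddon(AYONAddon):
    """Hypothetical addon exposing its own launch-hook directory."""

    name = "example"

    def get_launch_hook_paths(self, application):
        # The argument is optional; the launch context inspects the signature
        # and calls the method with or without 'application'.
        if application.host_name != "maya":
            return []
        return [os.path.join(os.path.dirname(__file__), "launch_hooks")]
```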
+ paths = [] + + # TODO load additional studio paths from settings + global_hooks_dir = os.path.join(AYON_CORE_ROOT, "hooks") + + hooks_dirs = [ + global_hooks_dir + ] + if self.host_name: + # If host requires launch hooks and is module then launch hooks + # should be collected using 'collect_launch_hook_paths' + # - module have to implement 'get_launch_hook_paths' + host_module = self.addons_manager.get_host_addon(self.host_name) + if not host_module: + hooks_dirs.append(os.path.join( + AYON_CORE_ROOT, "hosts", self.host_name, "hooks" + )) + + for path in hooks_dirs: + if ( + os.path.exists(path) + and os.path.isdir(path) + and path not in paths + ): + paths.append(path) + + # Load modules paths + paths.extend(self._collect_addons_launch_hook_paths()) + + return paths + + def discover_launch_hooks(self, force=False): + """Load and prepare launch hooks.""" + if ( + self.prelaunch_hooks is not None + or self.postlaunch_hooks is not None + ): + if not force: + self.log.info("Launch hooks were already discovered.") + return + + self.prelaunch_hooks.clear() + self.postlaunch_hooks.clear() + + self.log.debug("Discovery of launch hooks started.") + + paths = self.paths_to_launch_hooks() + self.log.debug("Paths searched for launch hooks:\n{}".format( + "\n".join("- {}".format(path) for path in paths) + )) + + all_classes = { + "pre": [], + "post": [] + } + for path in paths: + if not os.path.exists(path): + self.log.info( + "Path to launch hooks does not exist: \"{}\"".format(path) + ) + continue + + modules, _crashed = modules_from_path(path) + for _filepath, module in modules: + all_classes["pre"].extend( + classes_from_module(PreLaunchHook, module) + ) + all_classes["post"].extend( + classes_from_module(PostLaunchHook, module) + ) + + for launch_type, classes in all_classes.items(): + hooks_with_order = [] + hooks_without_order = [] + for klass in classes: + try: + hook = klass(self) + if not hook.is_valid: + self.log.debug( + "Skipped hook invalid for current launch context: " + "{}".format(klass.__name__) + ) + continue + + if inspect.isabstract(hook): + self.log.debug("Skipped abstract hook: {}".format( + klass.__name__ + )) + continue + + # Separate hooks by pre/post class + if hook.order is None: + hooks_without_order.append(hook) + else: + hooks_with_order.append(hook) + + except Exception: + self.log.warning( + "Initialization of hook failed: " + "{}".format(klass.__name__), + exc_info=True + ) + + # Sort hooks with order by order + ordered_hooks = list(sorted( + hooks_with_order, key=lambda obj: obj.order + )) + # Extend ordered hooks with hooks without defined order + ordered_hooks.extend(hooks_without_order) + + if launch_type == "pre": + self.prelaunch_hooks = ordered_hooks + else: + self.postlaunch_hooks = ordered_hooks + + self.log.debug("Found {} prelaunch and {} postlaunch hooks.".format( + len(self.prelaunch_hooks), len(self.postlaunch_hooks) + )) + + @property + def app_name(self): + return self.application.name + + @property + def host_name(self): + return self.application.host_name + + @property + def app_group(self): + return self.application.group + + @property + def manager(self): + return self.application.manager + + def _run_process(self): + # Windows and MacOS have easier process start + low_platform = platform.system().lower() + if low_platform in ("windows", "darwin"): + return subprocess.Popen(self.launch_args, **self.kwargs) + + # Linux uses mid process + # - it is possible that the mid process executable is not + # available for this version of AYON in that case 
use standard + # launch + launch_args = get_linux_launcher_args() + if launch_args is None: + return subprocess.Popen(self.launch_args, **self.kwargs) + + # Prepare data that will be passed to midprocess + # - store arguments to a json and pass path to json as last argument + # - pass environments to set + app_env = self.kwargs.pop("env", {}) + json_data = { + "args": self.launch_args, + "env": app_env + } + if app_env: + # Filter environments of subprocess + self.kwargs["env"] = { + key: value + for key, value in os.environ.items() + if key in app_env + } + + # Create temp file + json_temp = tempfile.NamedTemporaryFile( + mode="w", prefix="op_app_args", suffix=".json", delete=False + ) + json_temp.close() + json_temp_filpath = json_temp.name + with open(json_temp_filpath, "w") as stream: + json.dump(json_data, stream) + + launch_args.append(json_temp_filpath) + + # Create mid-process which will launch application + process = subprocess.Popen(launch_args, **self.kwargs) + # Wait until the process finishes + # - This is important! The process would stay in "open" state. + process.wait() + # Remove the temp file + os.remove(json_temp_filpath) + # Return process which is already terminated + return process + + def run_prelaunch_hooks(self): + """Run prelaunch hooks. + + This method will be executed only once, any future calls will skip + the processing. + """ + + if self._prelaunch_hooks_executed: + self.log.warning("Prelaunch hooks were already executed.") + return + # Discover launch hooks + self.discover_launch_hooks() + + # Execute prelaunch hooks + for prelaunch_hook in self.prelaunch_hooks: + self.log.debug("Executing prelaunch hook: {}".format( + str(prelaunch_hook.__class__.__name__) + )) + prelaunch_hook.execute() + self._prelaunch_hooks_executed = True + + def launch(self): + """Collect data for new process and then create it. + + This method must not be executed more than once. + + Returns: + subprocess.Popen: Created process as Popen object. + """ + if self.process is not None: + self.log.warning("Application was already launched.") + return + + if not self._prelaunch_hooks_executed: + self.run_prelaunch_hooks() + + self.log.debug("All prelaunch hook executed. Starting new process.") + + # Prepare subprocess args + args_len_str = "" + if isinstance(self.launch_args, str): + args = self.launch_args + else: + args = self.clear_launch_args(self.launch_args) + args_len_str = " ({})".format(len(args)) + self.log.info( + "Launching \"{}\" with args{}: {}".format( + self.application.full_name, args_len_str, args + ) + ) + self.launch_args = args + + # Run process + self.process = self._run_process() + + # Process post launch hooks + for postlaunch_hook in self.postlaunch_hooks: + self.log.debug("Executing postlaunch hook: {}".format( + str(postlaunch_hook.__class__.__name__) + )) + + # TODO how to handle errors? + # - store to variable to let them accessible? + try: + postlaunch_hook.execute() + + except Exception: + self.log.warning( + "After launch procedures were not successful.", + exc_info=True + ) + + self.log.debug("Launch of {} finished.".format( + self.application.full_name + )) + + return self.process + + @staticmethod + def clear_launch_args(args): + """Collect launch arguments to final order. + + Launch argument should be list that may contain another lists this + function will upack inner lists and keep ordering. 
+ + ``` + # source + [ [ arg1, [ arg2, arg3 ] ], arg4, [arg5, arg6]] + # result + [ arg1, arg2, arg3, arg4, arg5, arg6] + + Args: + args (list): Source arguments in list may contain inner lists. + + Return: + list: Unpacked arguments. + """ + if isinstance(args, str): + return args + all_cleared = False + while not all_cleared: + all_cleared = True + new_args = [] + for arg in args: + if isinstance(arg, (list, tuple, set)): + all_cleared = False + for _arg in arg: + new_args.append(_arg) + else: + new_args.append(arg) + args = new_args + + return args + diff --git a/server_addon/applications/client/ayon_applications/plugins/publish/collect_app_name.py b/server_addon/applications/client/ayon_applications/plugins/publish/collect_app_name.py new file mode 100644 index 0000000000..f54a551cda --- /dev/null +++ b/server_addon/applications/client/ayon_applications/plugins/publish/collect_app_name.py @@ -0,0 +1,48 @@ +""" +Run after global plugin 'CollectHostName' in ayon_core. + +Requires: + None + +Provides: + context -> hostName (str) + context -> appName (str) + context -> appLabel (str) +""" +import os +import pyblish.api + +from ayon_applications import ApplicationManager + + +class CollectAppName(pyblish.api.ContextPlugin): + """Collect avalon host name to context.""" + + label = "Collect App Name" + order = pyblish.api.CollectorOrder - 0.499999 + + def process(self, context): + host_name = context.data.get("hostName") + app_name = context.data.get("appName") + app_label = context.data.get("appLabel") + # Don't override value if is already set + if host_name and app_name and app_label: + return + + # Use AYON_APP_NAME to get full app name + if not app_name: + app_name = os.environ.get("AYON_APP_NAME") + + # Fill missing values based on app full name + if (not host_name or not app_label) and app_name: + app_manager = ApplicationManager() + app = app_manager.applications.get(app_name) + if app: + if not host_name: + host_name = app.host_name + if not app_label: + app_label = app.full_label + + context.data["hostName"] = host_name + context.data["appName"] = app_name + context.data["appLabel"] = app_label diff --git a/server_addon/applications/client/ayon_applications/utils.py b/server_addon/applications/client/ayon_applications/utils.py new file mode 100644 index 0000000000..234fa6c683 --- /dev/null +++ b/server_addon/applications/client/ayon_applications/utils.py @@ -0,0 +1,609 @@ +import os +import copy +import json +import platform +import collections + +import six +import acre + +from ayon_core import AYON_CORE_ROOT +from ayon_core.settings import get_project_settings +from ayon_core.lib import Logger, get_ayon_username +from ayon_core.addon import AddonsManager +from ayon_core.pipeline import HOST_WORKFILE_EXTENSIONS +from ayon_core.pipeline.template_data import get_template_data +from ayon_core.pipeline.workfile import ( + get_workfile_template_key, + get_workdir_with_workdir_data, + get_last_workfile, + should_use_last_workfile_on_launch, + should_open_workfiles_tool_on_launch, +) + +from .constants import PLATFORM_NAMES, DEFAULT_ENV_SUBGROUP +from .exceptions import MissingRequiredKey, ApplicationLaunchFailed +from .manager import ApplicationManager + + +def parse_environments(env_data, env_group=None, platform_name=None): + """Parse environment values from settings byt group and platform. + + Data may contain up to 2 hierarchical levels of dictionaries. At the end + of the last level must be string or list. 
List is joined using platform + specific joiner (';' for windows and ':' for linux and mac). + + Hierarchical levels can contain keys for subgroups and platform name. + Platform specific values must be always last level of dictionary. Platform + names are "windows" (MS Windows), "linux" (any linux distribution) and + "darwin" (any MacOS distribution). + + Subgroups are helpers added mainly for standard and on farm usage. Farm + may require different environments for e.g. licence related values or + plugins. Default subgroup is "standard". + + Examples: + ``` + { + # Unchanged value + "ENV_KEY1": "value", + # Empty values are kept (unset environment variable) + "ENV_KEY2": "", + + # Join list values with ':' or ';' + "ENV_KEY3": ["value1", "value2"], + + # Environment groups + "ENV_KEY4": { + "standard": "DEMO_SERVER_URL", + "farm": "LICENCE_SERVER_URL" + }, + + # Platform specific (and only for windows and mac) + "ENV_KEY5": { + "windows": "windows value", + "darwin": ["value 1", "value 2"] + }, + + # Environment groups and platform combination + "ENV_KEY6": { + "farm": "FARM_VALUE", + "standard": { + "windows": ["value1", "value2"], + "linux": "value1", + "darwin": "" + } + } + } + ``` + """ + output = {} + if not env_data: + return output + + if not env_group: + env_group = DEFAULT_ENV_SUBGROUP + + if not platform_name: + platform_name = platform.system().lower() + + for key, value in env_data.items(): + if isinstance(value, dict): + # Look if any key is platform key + # - expect that represents environment group if does not contain + # platform keys + if not PLATFORM_NAMES.intersection(set(value.keys())): + # Skip the key if group is not available + if env_group not in value: + continue + value = value[env_group] + + # Check again if value is dictionary + # - this time there should be only platform keys + if isinstance(value, dict): + value = value.get(platform_name) + + # Check if value is list and join it's values + # QUESTION Should empty values be skipped? + if isinstance(value, (list, tuple)): + value = os.pathsep.join(value) + + # Set key to output if value is string + if isinstance(value, six.string_types): + output[key] = value + return output + + +class EnvironmentPrepData(dict): + """Helper dictionary for storin temp data during environment prep. + + Args: + data (dict): Data must contain required keys. + """ + required_keys = ( + "project_entity", "folder_entity", "task_entity", "app", "anatomy" + ) + + def __init__(self, data): + for key in self.required_keys: + if key not in data: + raise MissingRequiredKey(key) + + if not data.get("log"): + data["log"] = Logger.get_logger("EnvironmentPrepData") + + if data.get("env") is None: + data["env"] = os.environ.copy() + + project_name = data["project_entity"]["name"] + if "project_settings" not in data: + data["project_settings"] = get_project_settings(project_name) + + super(EnvironmentPrepData, self).__init__(data) + + +def get_app_environments_for_context( + project_name, + folder_path, + task_name, + app_name, + env_group=None, + launch_type=None, + env=None, + addons_manager=None +): + """Prepare environment variables by context. + Args: + project_name (str): Name of project. + folder_path (str): Folder path. + task_name (str): Name of task. + app_name (str): Name of application that is launched and can be found + by ApplicationManager. + env_group (Optional[str]): Name of environment group. If not passed + default group is used. + launch_type (Optional[str]): Type for which prelaunch hooks are + executed. 
+ env (Optional[dict[str, str]]): Initial environment variables. + `os.environ` is used when not passed. + addons_manager (Optional[AddonsManager]): Initialized modules + manager. + + Returns: + dict: Environments for passed context and application. + """ + + # Prepare app object which can be obtained only from ApplicationManager + app_manager = ApplicationManager() + context = app_manager.create_launch_context( + app_name, + project_name=project_name, + folder_path=folder_path, + task_name=task_name, + env_group=env_group, + launch_type=launch_type, + env=env, + addons_manager=addons_manager, + modules_manager=addons_manager, + ) + context.run_prelaunch_hooks() + return context.env + + +def _merge_env(env, current_env): + """Modified function(merge) from acre module.""" + result = current_env.copy() + for key, value in env.items(): + # Keep missing keys by not filling `missing` kwarg + value = acre.lib.partial_format(value, data=current_env) + result[key] = value + return result + + +def _add_python_version_paths(app, env, logger, addons_manager): + """Add vendor packages specific for a Python version.""" + + for addon in addons_manager.get_enabled_addons(): + addon.modify_application_launch_arguments(app, env) + + # Skip adding if host name is not set + if not app.host_name: + return + + # Add Python 2/3 modules + python_vendor_dir = os.path.join( + AYON_CORE_ROOT, + "vendor", + "python" + ) + if app.use_python_2: + pythonpath = os.path.join(python_vendor_dir, "python_2") + else: + pythonpath = os.path.join(python_vendor_dir, "python_3") + + if not os.path.exists(pythonpath): + return + + logger.debug("Adding Python version specific paths to PYTHONPATH") + python_paths = [pythonpath] + + # Load PYTHONPATH from current launch context + python_path = env.get("PYTHONPATH") + if python_path: + python_paths.append(python_path) + + # Set new PYTHONPATH to launch context environments + env["PYTHONPATH"] = os.pathsep.join(python_paths) + + +def prepare_app_environments( + data, env_group=None, implementation_envs=True, addons_manager=None +): + """Modify launch environments based on launched app and context. + + Args: + data (EnvironmentPrepData): Dictionary where result and intermediate + result will be stored. + + """ + app = data["app"] + log = data["log"] + source_env = data["env"].copy() + + if addons_manager is None: + addons_manager = AddonsManager() + + _add_python_version_paths(app, source_env, log, addons_manager) + + # Use environments from local settings + filtered_local_envs = {} + # NOTE Overrides for environment variables are not implemented in AYON. 
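Referring back to `get_app_environments_for_context` defined above, a minimal usage sketch; the project, folder, task and application names are hypothetical.

```python
import json

from ayon_applications.utils import get_app_environments_for_context

env = get_app_environments_for_context(
    project_name="demo_project",
    folder_path="/shots/sh010",
    task_name="compositing",
    app_name="nuke/15-0",
)

# The result is a plain dict of strings, e.g. ready to be dumped for a farm job.
with open("environments.json", "w") as stream:
    json.dump(env, stream, indent=4)
```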
+ # project_settings = data["project_settings"] + # whitelist_envs = project_settings["general"].get("local_env_white_list") + # if whitelist_envs: + # local_settings = get_local_settings() + # local_envs = local_settings.get("environments") or {} + # filtered_local_envs = { + # key: value + # for key, value in local_envs.items() + # if key in whitelist_envs + # } + + # Apply local environment variables for already existing values + for key, value in filtered_local_envs.items(): + if key in source_env: + source_env[key] = value + + # `app_and_tool_labels` has debug purpose + app_and_tool_labels = [app.full_name] + # Environments for application + environments = [ + app.group.environment, + app.environment + ] + + folder_entity = data.get("folder_entity") + # Add tools environments + groups_by_name = {} + tool_by_group_name = collections.defaultdict(dict) + if folder_entity: + # Make sure each tool group can be added only once + for key in folder_entity["attrib"].get("tools") or []: + tool = app.manager.tools.get(key) + if not tool or not tool.is_valid_for_app(app): + continue + groups_by_name[tool.group.name] = tool.group + tool_by_group_name[tool.group.name][tool.name] = tool + + for group_name in sorted(groups_by_name.keys()): + group = groups_by_name[group_name] + environments.append(group.environment) + for tool_name in sorted(tool_by_group_name[group_name].keys()): + tool = tool_by_group_name[group_name][tool_name] + environments.append(tool.environment) + app_and_tool_labels.append(tool.full_name) + + log.debug( + "Will add environments for apps and tools: {}".format( + ", ".join(app_and_tool_labels) + ) + ) + + env_values = {} + for _env_values in environments: + if not _env_values: + continue + + # Choose right platform + tool_env = parse_environments(_env_values, env_group) + + # Apply local environment variables + # - must happen between all values because they may be used during + # merge + for key, value in filtered_local_envs.items(): + if key in tool_env: + tool_env[key] = value + + # Merge dictionaries + env_values = _merge_env(tool_env, env_values) + + merged_env = _merge_env(env_values, source_env) + + loaded_env = acre.compute(merged_env, cleanup=False) + + final_env = None + # Add host specific environments + if app.host_name and implementation_envs: + host_addon = addons_manager.get_host_addon(app.host_name) + add_implementation_envs = None + if host_addon: + add_implementation_envs = getattr( + host_addon, "add_implementation_envs", None + ) + if add_implementation_envs: + # Function may only modify passed dict without returning value + final_env = add_implementation_envs(loaded_env, app) + + if final_env is None: + final_env = loaded_env + + keys_to_remove = set(source_env.keys()) - set(final_env.keys()) + + # Update env + data["env"].update(final_env) + for key in keys_to_remove: + data["env"].pop(key, None) + + +def apply_project_environments_value( + project_name, env, project_settings=None, env_group=None +): + """Apply project specific environments on passed environments. + + The environments are applied on passed `env` argument value so it is not + required to apply changes back. + + Args: + project_name (str): Name of project for which environments should be + received. + env (dict): Environment values on which project specific environments + will be applied. + project_settings (dict): Project settings for passed project name. + Optional if project settings are already prepared. + + Returns: + dict: Passed env values with applied project environments. 
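The `_merge_env` helper above relies on `acre` partial formatting, so values being merged may reference keys that already exist in the environment under construction. A small illustrative sketch (the keys and paths are made up):

```python
from ayon_applications.utils import _merge_env

current = {"MAYA_VERSION": "2025", "PATH": "/usr/bin"}
addition = {
    "MAYA_MODULE_PATH": "/studio/maya/{MAYA_VERSION}/modules",
    "PATH": "/studio/bin:{PATH}",
}

merged = _merge_env(addition, current)
# merged["MAYA_MODULE_PATH"] -> "/studio/maya/2025/modules"
# merged["PATH"]             -> "/studio/bin:/usr/bin"
# References to keys missing from 'current' would be kept unformatted.
```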
+ + Raises: + KeyError: If project settings do not contain keys for project specific + environments. + + """ + if project_settings is None: + project_settings = get_project_settings(project_name) + + env_value = project_settings["core"]["project_environments"] + if env_value: + env_value = json.loads(env_value) + parsed_value = parse_environments(env_value, env_group) + env.update(acre.compute( + _merge_env(parsed_value, env), + cleanup=False + )) + return env + + +def prepare_context_environments(data, env_group=None, addons_manager=None): + """Modify launch environments with context data for launched host. + + Args: + data (EnvironmentPrepData): Dictionary where result and intermediate + result will be stored. + + """ + # Context environments + log = data["log"] + + project_entity = data["project_entity"] + folder_entity = data["folder_entity"] + task_entity = data["task_entity"] + if not project_entity: + log.info( + "Skipping context environments preparation." + " Launch context does not contain required data." + ) + return + + # Load project specific environments + project_name = project_entity["name"] + project_settings = get_project_settings(project_name) + data["project_settings"] = project_settings + + app = data["app"] + context_env = { + "AYON_PROJECT_NAME": project_entity["name"], + "AYON_APP_NAME": app.full_name + } + if folder_entity: + folder_path = folder_entity["path"] + context_env["AYON_FOLDER_PATH"] = folder_path + + if task_entity: + context_env["AYON_TASK_NAME"] = task_entity["name"] + + log.debug( + "Context environments set:\n{}".format( + json.dumps(context_env, indent=4) + ) + ) + data["env"].update(context_env) + + # Apply project specific environments on current env value + # - apply them once the context environments are set + apply_project_environments_value( + project_name, data["env"], project_settings, env_group + ) + + if not app.is_host: + return + + data["env"]["AYON_HOST_NAME"] = app.host_name + + if not folder_entity or not task_entity: + # QUESTION replace with log.info and skip workfile discovery? + # - technically it should be possible to launch host without context + raise ApplicationLaunchFailed( + "Host launch require folder and task context." + ) + + workdir_data = get_template_data( + project_entity, + folder_entity, + task_entity, + app.host_name, + project_settings + ) + data["workdir_data"] = workdir_data + + anatomy = data["anatomy"] + + task_type = workdir_data["task"]["type"] + # Temp solution how to pass task type to `_prepare_last_workfile` + data["task_type"] = task_type + + try: + workdir = get_workdir_with_workdir_data( + workdir_data, + anatomy.project_name, + anatomy, + project_settings=project_settings + ) + + except Exception as exc: + raise ApplicationLaunchFailed( + "Error in anatomy.format: {}".format(str(exc)) + ) + + if not os.path.exists(workdir): + log.debug( + "Creating workdir folder: \"{}\"".format(workdir) + ) + try: + os.makedirs(workdir) + except Exception as exc: + raise ApplicationLaunchFailed( + "Couldn't create workdir because: {}".format(str(exc)) + ) + + data["env"]["AYON_WORKDIR"] = workdir + + _prepare_last_workfile(data, workdir, addons_manager) + + +def _prepare_last_workfile(data, workdir, addons_manager): + """last workfile workflow preparation. + + Function check if should care about last workfile workflow and tries + to find the last workfile. Both information are stored to `data` and + environments. + + Last workfile is filled always (with version 1) even if any workfile + exists yet. 
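To make the project-environment step above concrete: `apply_project_environments_value` mutates the passed mapping in place, so a caller can layer project overrides onto an existing environment. A minimal sketch with a hypothetical project name:

```python
import os

from ayon_applications.utils import apply_project_environments_value

env = dict(os.environ)
apply_project_environments_value("demo_project", env)
# 'env' now also contains whatever the project's core "project_environments"
# JSON setting defines, resolved through acre.
```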
+ + Args: + data (EnvironmentPrepData): Dictionary where result and intermediate + result will be stored. + workdir (str): Path to folder where workfiles should be stored. + + """ + if not addons_manager: + addons_manager = AddonsManager() + + log = data["log"] + + _workdir_data = data.get("workdir_data") + if not _workdir_data: + log.info( + "Skipping last workfile preparation." + " Key `workdir_data` not filled." + ) + return + + app = data["app"] + workdir_data = copy.deepcopy(_workdir_data) + project_name = data["project_name"] + task_name = data["task_name"] + task_type = data["task_type"] + + start_last_workfile = data.get("start_last_workfile") + if start_last_workfile is None: + start_last_workfile = should_use_last_workfile_on_launch( + project_name, app.host_name, task_name, task_type + ) + else: + log.info("Opening of last workfile was disabled by user") + + data["start_last_workfile"] = start_last_workfile + + workfile_startup = should_open_workfiles_tool_on_launch( + project_name, app.host_name, task_name, task_type + ) + data["workfile_startup"] = workfile_startup + + # Store boolean as "0"(False) or "1"(True) + data["env"]["AVALON_OPEN_LAST_WORKFILE"] = ( + str(int(bool(start_last_workfile))) + ) + data["env"]["AYON_WORKFILE_TOOL_ON_START"] = ( + str(int(bool(workfile_startup))) + ) + + _sub_msg = "" if start_last_workfile else " not" + log.debug( + "Last workfile should{} be opened on start.".format(_sub_msg) + ) + + # Last workfile path + last_workfile_path = data.get("last_workfile_path") or "" + if not last_workfile_path: + host_addon = addons_manager.get_host_addon(app.host_name) + if host_addon: + extensions = host_addon.get_workfile_extensions() + else: + extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) + + if extensions: + anatomy = data["anatomy"] + project_settings = data["project_settings"] + task_type = workdir_data["task"]["type"] + template_key = get_workfile_template_key( + project_name, + task_type, + app.host_name, + project_settings=project_settings + ) + # Find last workfile + file_template = anatomy.get_template_item( + "work", template_key, "file" + ).template + + workdir_data.update({ + "version": 1, + "user": get_ayon_username(), + "ext": extensions[0] + }) + + last_workfile_path = get_last_workfile( + workdir, file_template, workdir_data, extensions, True + ) + + if os.path.exists(last_workfile_path): + log.debug(( + "Workfiles for launch context does not exists" + " yet but path will be set." 
+ )) + log.debug( + "Setting last workfile path: {}".format(last_workfile_path) + ) + + data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path + data["last_workfile_path"] = last_workfile_path diff --git a/server_addon/applications/package.py b/server_addon/applications/package.py new file mode 100644 index 0000000000..ce312ed662 --- /dev/null +++ b/server_addon/applications/package.py @@ -0,0 +1,3 @@ +name = "applications" +title = "Applications" +version = "0.2.0" diff --git a/server_addon/applications/server/__init__.py b/server_addon/applications/server/__init__.py index d5c2de3df3..d85678b77b 100644 --- a/server_addon/applications/server/__init__.py +++ b/server_addon/applications/server/__init__.py @@ -3,9 +3,9 @@ import json import copy from ayon_server.addons import BaseServerAddon, AddonLibrary +from ayon_server.entities.core import attribute_library from ayon_server.lib.postgres import Postgres -from .version import __version__ from .settings import ApplicationsAddonSettings, DEFAULT_VALUES try: @@ -86,9 +86,6 @@ def get_enum_items_from_groups(groups): class ApplicationsAddon(BaseServerAddon): - name = "applications" - title = "Applications" - version = __version__ settings_model = ApplicationsAddonSettings async def get_default_settings(self): @@ -118,9 +115,28 @@ class ApplicationsAddon(BaseServerAddon): ) async def setup(self): - need_restart = await self.create_applications_attribute() + need_restart = await self.create_required_attributes() if need_restart: self.request_server_restart() + await self._update_enums() + + def _get_applications_def(self): + return { + "name": "applications", + "type": "list_of_strings", + "title": "Applications", + "scope": ["project"], + "enum":[], + } + + def _get_tools_def(self): + return { + "name": "tools", + "type": "list_of_strings", + "title": "Tools", + "scope": ["project", "folder", "task"], + "enum":[], + } async def create_applications_attribute(self) -> bool: """Make sure there are required attributes which ftrack addon needs. @@ -129,6 +145,73 @@ class ApplicationsAddon(BaseServerAddon): bool: 'True' if an attribute was created or updated. """ + need_restart = await self.create_required_attributes() + await self._update_enums() + return need_restart + + async def create_required_attributes(self) -> bool: + """Make sure there are required 'applications' and 'tools' attributes. + This only checks for the existence of the attributes, it does not populate + them with any data. When an attribute is added, server needs to be restarted, + while adding enum data to the attribute does not require a restart. + Returns: + bool: 'True' if an attribute was created or updated. 
+ """ + + # keep track of the last attribute position (for adding new attributes) + apps_attribute_data = self._get_applications_def() + tools_attribute_data = self._get_tools_def() + + apps_attrib_name = apps_attribute_data["name"] + tools_attrib_name = tools_attribute_data["name"] + + async with Postgres.acquire() as conn, conn.transaction(): + query = "SELECT BOOL_OR(name = 'applications') AS has_applications, BOOL_OR(name = 'tools') AS has_tools FROM attributes;" + result = (await conn.fetch(query))[0] + + attributes_to_create = {} + if not result["has_applications"]: + attributes_to_create[apps_attrib_name] = { + "scope": apps_attribute_data["scope"], + "data": { + "title": apps_attribute_data["title"], + "type": apps_attribute_data["type"], + "enum": [], + } + } + + if not result["has_tools"]: + attributes_to_create[tools_attrib_name] = { + "scope": tools_attribute_data["scope"], + "data": { + "title": tools_attribute_data["title"], + "type": tools_attribute_data["type"], + "enum": [], + }, + } + + needs_restart = False + # when any of the required attributes are not present, add them + # and return 'True' to indicate that server needs to be restarted + for name, payload in attributes_to_create.items(): + insert_query = "INSERT INTO attributes (name, scope, data, position) VALUES ($1, $2, $3, (SELECT COALESCE(MAX(position), 0) + 1 FROM attributes)) ON CONFLICT DO NOTHING" + await conn.execute( + insert_query, + name, + payload["scope"], + payload["data"], + ) + needs_restart = True + + return needs_restart + + async def _update_enums(self): + """Updates applications and tools enums based on the addon settings. + This method is called when the addon is started (after we are sure that the + 'applications' and 'tools' attributes exist) and when the addon settings are + updated (using on_settings_updated method). 
+ """ + instance = AddonLibrary.getinstance() app_defs = instance.data.get(self.name) all_applications = [] @@ -148,33 +231,32 @@ class ApplicationsAddon(BaseServerAddon): merge_groups(all_applications, app_groups) merge_groups(all_tools, studio_settings["tool_groups"]) - query = "SELECT name, position, scope, data from public.attributes" - apps_attrib_name = "applications" tools_attrib_name = "tools" apps_enum = get_enum_items_from_groups(all_applications) tools_enum = get_enum_items_from_groups(all_tools) + apps_attribute_data = { "type": "list_of_strings", "title": "Applications", - "enum": apps_enum + "enum": apps_enum, } tools_attribute_data = { "type": "list_of_strings", "title": "Tools", - "enum": tools_enum + "enum": tools_enum, } + apps_scope = ["project"] tools_scope = ["project", "folder", "task"] - apps_match_position = None apps_matches = False - tools_match_position = None tools_matches = False - position = 1 - async for row in Postgres.iterate(query): - position += 1 + + async for row in Postgres.iterate( + "SELECT name, position, scope, data from public.attributes" + ): if row["name"] == apps_attrib_name: # Check if scope is matching ftrack addon requirements if ( @@ -182,7 +264,6 @@ class ApplicationsAddon(BaseServerAddon): and row["data"].get("enum") == apps_enum ): apps_matches = True - apps_match_position = row["position"] elif row["name"] == tools_attrib_name: if ( @@ -190,45 +271,41 @@ class ApplicationsAddon(BaseServerAddon): and row["data"].get("enum") == tools_enum ): tools_matches = True - tools_match_position = row["position"] if apps_matches and tools_matches: - return False + return - postgre_query = "\n".join(( - "INSERT INTO public.attributes", - " (name, position, scope, data)", - "VALUES", - " ($1, $2, $3, $4)", - "ON CONFLICT (name)", - "DO UPDATE SET", - " scope = $3,", - " data = $4", - )) if not apps_matches: - # Reuse position from found attribute - if apps_match_position is None: - apps_match_position = position - position += 1 - await Postgres.execute( - postgre_query, - apps_attrib_name, - apps_match_position, + """ + UPDATE attributes SET + scope = $1, + data = $2 + WHERE + name = $3 + """, apps_scope, apps_attribute_data, + apps_attrib_name, ) if not tools_matches: - if tools_match_position is None: - tools_match_position = position - position += 1 - await Postgres.execute( - postgre_query, - tools_attrib_name, - tools_match_position, + """ + UPDATE attributes SET + scope = $1, + data = $2 + WHERE + name = $3 + """, tools_scope, tools_attribute_data, + tools_attrib_name, ) - return True + + # Reset attributes cache on server + await attribute_library.load() + + async def on_settings_changed(self, *args, **kwargs): + _ = args, kwargs + await self._update_enums() diff --git a/server_addon/applications/server/applications.json b/server_addon/applications/server/applications.json index 85bf6f1dda..e4b72fdff9 100644 --- a/server_addon/applications/server/applications.json +++ b/server_addon/applications/server/applications.json @@ -7,6 +7,26 @@ "host_name": "maya", "environment": "{\n \"MAYA_DISABLE_CLIC_IPM\": \"Yes\",\n \"MAYA_DISABLE_CIP\": \"Yes\",\n \"MAYA_DISABLE_CER\": \"Yes\",\n \"PYMEL_SKIP_MEL_INIT\": \"Yes\",\n \"LC_ALL\": \"C\"\n}\n", "variants": [ + { + "name": "2025", + "label": "2025", + "executables": { + "windows": [ + "C:\\Program Files\\Autodesk\\Maya2025\\bin\\maya.exe" + ], + "darwin": ["/Applications/Autodesk/maya2025/Maya.app"], + "linux": [ + "/usr/autodesk/maya2025/bin/maya" + ] + }, + "arguments": { + "windows": [], + 
"darwin": [], + "linux": [] + }, + "environment": "{\n \"MAYA_VERSION\": \"2025\"\n}", + "use_python_2": false + }, { "name": "2024", "label": "2024", diff --git a/server_addon/applications/server/version.py b/server_addon/applications/server/version.py deleted file mode 100644 index 9cb17e7976..0000000000 --- a/server_addon/applications/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.8" diff --git a/server_addon/blender/server/settings/main.py b/server_addon/blender/server/settings/main.py index aed9b5632d..3cca22ae3b 100644 --- a/server_addon/blender/server/settings/main.py +++ b/server_addon/blender/server/settings/main.py @@ -6,7 +6,7 @@ from ayon_server.settings import ( from .imageio import BlenderImageIOModel from .publish_plugins import ( - PublishPuginsModel, + PublishPluginsModel, DEFAULT_BLENDER_PUBLISH_SETTINGS ) from .render_settings import ( @@ -47,8 +47,8 @@ class BlenderSettings(BaseSettingsModel): default_factory=TemplateWorkfileBaseOptions, title="Workfile Builder" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish Plugins" ) diff --git a/server_addon/blender/server/settings/publish_plugins.py b/server_addon/blender/server/settings/publish_plugins.py index c742fdc5bd..e998d7b057 100644 --- a/server_addon/blender/server/settings/publish_plugins.py +++ b/server_addon/blender/server/settings/publish_plugins.py @@ -66,7 +66,7 @@ class ExtractPlayblastModel(BaseSettingsModel): return validate_json_dict(value) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): ValidateCameraZeroKeyframe: ValidatePluginModel = SettingsField( default_factory=ValidatePluginModel, title="Validate Camera Zero Keyframe", diff --git a/server_addon/celaction/server/settings.py b/server_addon/celaction/server/settings.py index 9208948a07..afa9773477 100644 --- a/server_addon/celaction/server/settings.py +++ b/server_addon/celaction/server/settings.py @@ -42,7 +42,7 @@ class WorkfileModel(BaseSettingsModel): ) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectRenderPath: CollectRenderPathModel = SettingsField( default_factory=CollectRenderPathModel, title="Collect Render Path" @@ -57,8 +57,8 @@ class CelActionSettings(BaseSettingsModel): workfile: WorkfileModel = SettingsField( title="Workfile" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish plugins", ) diff --git a/server_addon/create_ayon_addons.py b/server_addon/create_ayon_addons.py index c2686199be..bfd601af07 100644 --- a/server_addon/create_ayon_addons.py +++ b/server_addon/create_ayon_addons.py @@ -4,6 +4,8 @@ import re import shutil import argparse import zipfile +import types +import importlib import platform import collections from pathlib import Path @@ -44,6 +46,11 @@ version = "{addon_version}" plugin_for = ["ayon_server"] """ +CLIENT_VERSION_CONTENT = '''# -*- coding: utf-8 -*- +"""Package declaring AYON core addon version.""" +__version__ = "{}" +''' + class ZipFileLongPaths(zipfile.ZipFile): """Allows longer paths in zip files. 
@@ -175,13 +182,75 @@ def create_addon_zip( shutil.rmtree(str(output_dir / addon_name)) +def prepare_client_code( + addon_dir: Path, + addon_output_dir: Path, + addon_version: str +): + client_dir = addon_dir / "client" + if not client_dir.exists(): + return + + # Prepare private dir in output + private_dir = addon_output_dir / "private" + private_dir.mkdir(parents=True, exist_ok=True) + + # Copy pyproject toml if available + pyproject_toml = client_dir / "pyproject.toml" + if pyproject_toml.exists(): + shutil.copy(pyproject_toml, private_dir) + + for subpath in client_dir.iterdir(): + if subpath.name == "pyproject.toml": + continue + + if subpath.is_file(): + continue + + # Update version.py with server version if 'version.py' is available + version_path = subpath / "version.py" + if version_path.exists(): + with open(version_path, "w") as stream: + stream.write(CLIENT_VERSION_CONTENT.format(addon_version)) + + zip_filepath = private_dir / "client.zip" + with ZipFileLongPaths(zip_filepath, "w", zipfile.ZIP_DEFLATED) as zipf: + # Add client code content to zip + for path, sub_path in find_files_in_subdir(str(subpath)): + sub_path = os.path.join(subpath.name, sub_path) + zipf.write(path, sub_path) + + +def import_filepath(path: Path, module_name: Optional[str] = None): + if not module_name: + module_name = os.path.splitext(path.name)[0] + + # Convert to string + path = str(path) + module = types.ModuleType(module_name) + module.__file__ = path + + # Use loader so module has full specs + module_loader = importlib.machinery.SourceFileLoader( + module_name, path + ) + module_loader.exec_module(module) + return module + + def create_addon_package( addon_dir: Path, output_dir: Path, create_zip: bool, keep_source: bool, ): - addon_version = get_addon_version(addon_dir) + src_package_py = addon_dir / "package.py" + package = None + if src_package_py.exists(): + package = import_filepath(src_package_py) + addon_version = package.version + else: + addon_version = get_addon_version(addon_dir) addon_output_dir = output_dir / addon_dir.name / addon_version if addon_output_dir.exists(): @@ -189,22 +258,27 @@ def create_addon_package( addon_output_dir.mkdir(parents=True) # Copy server content - package_py = addon_output_dir / "package.py" - addon_name = addon_dir.name - if addon_name == "royal_render": - addon_name = "royalrender" - package_py_content = PACKAGE_PY_TEMPLATE.format( - addon_name=addon_name, addon_version=addon_version - ) + dst_package_py = addon_output_dir / "package.py" + if package is not None: + shutil.copy(src_package_py, dst_package_py) + else: + addon_name = addon_dir.name + if addon_name == "royal_render": + addon_name = "royalrender" + package_py_content = PACKAGE_PY_TEMPLATE.format( + addon_name=addon_name, addon_version=addon_version + ) - with open(package_py, "w+") as pkg_py: - pkg_py.write(package_py_content) + with open(dst_package_py, "w+") as pkg_py: + pkg_py.write(package_py_content) server_dir = addon_dir / "server" shutil.copytree( server_dir, addon_output_dir / "server", dirs_exist_ok=True ) + prepare_client_code(addon_dir, addon_output_dir, addon_version) + if create_zip: create_addon_zip( output_dir, addon_dir.name, addon_version, keep_source diff --git a/server_addon/deadline/server/settings/main.py b/server_addon/deadline/server/settings/main.py index 9537d6d550..83c7567c0d 100644 --- a/server_addon/deadline/server/settings/main.py +++ b/server_addon/deadline/server/settings/main.py @@ -1,3 +1,4 @@ +from typing import TYPE_CHECKING from pydantic import validator 
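Circling back to the packaging changes above: with a `package.py` present, `create_addon_package` resolves the addon version through `import_filepath` instead of the server `version.py`. A rough sketch of the resulting flow, assuming it runs inside `create_ayon_addons.py` where `import_filepath` is defined; the output root is illustrative.

```python
from pathlib import Path

addon_dir = Path("server_addon/applications")
output_dir = Path("package_output")  # hypothetical output root

package = import_filepath(addon_dir / "package.py")
addon_output_dir = output_dir / addon_dir.name / package.version
# -> package_output/applications/0.2.0 containing:
#      package.py             (copied as-is)
#      server/                (copy of the addon's server code)
#      private/client.zip     (zipped client package)
#      private/pyproject.toml (only when the client ships one)
```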
from ayon_server.settings import ( @@ -5,6 +6,8 @@ from ayon_server.settings import ( SettingsField, ensure_unique_names, ) +if TYPE_CHECKING: + from ayon_server.addons import BaseServerAddon from .publish_plugins import ( PublishPluginsModel, @@ -19,7 +22,7 @@ class ServerListSubmodel(BaseSettingsModel): async def defined_deadline_ws_name_enum_resolver( - addon: "BaseServerAddon", + addon: BaseServerAddon, settings_variant: str = "production", project_name: str | None = None, ) -> list[str]: diff --git a/server_addon/flame/server/settings/create_plugins.py b/server_addon/flame/server/settings/create_plugins.py index 44fb8a2e91..2f17ec40c4 100644 --- a/server_addon/flame/server/settings/create_plugins.py +++ b/server_addon/flame/server/settings/create_plugins.py @@ -87,7 +87,7 @@ class CreateShotClipModel(BaseSettingsModel): ) -class CreatePuginsModel(BaseSettingsModel): +class CreatePluginsModel(BaseSettingsModel): CreateShotClip: CreateShotClipModel = SettingsField( default_factory=CreateShotClipModel, title="Create Shot Clip" diff --git a/server_addon/flame/server/settings/main.py b/server_addon/flame/server/settings/main.py index 047f5af287..c838ee9646 100644 --- a/server_addon/flame/server/settings/main.py +++ b/server_addon/flame/server/settings/main.py @@ -1,8 +1,8 @@ from ayon_server.settings import BaseSettingsModel, SettingsField from .imageio import FlameImageIOModel, DEFAULT_IMAGEIO_SETTINGS -from .create_plugins import CreatePuginsModel, DEFAULT_CREATE_SETTINGS -from .publish_plugins import PublishPuginsModel, DEFAULT_PUBLISH_SETTINGS +from .create_plugins import CreatePluginsModel, DEFAULT_CREATE_SETTINGS +from .publish_plugins import PublishPluginsModel, DEFAULT_PUBLISH_SETTINGS from .loader_plugins import LoaderPluginsModel, DEFAULT_LOADER_SETTINGS @@ -11,12 +11,12 @@ class FlameSettings(BaseSettingsModel): default_factory=FlameImageIOModel, title="Color Management (ImageIO)" ) - create: CreatePuginsModel = SettingsField( - default_factory=CreatePuginsModel, + create: CreatePluginsModel = SettingsField( + default_factory=CreatePluginsModel, title="Create plugins" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish plugins" ) load: LoaderPluginsModel = SettingsField( diff --git a/server_addon/flame/server/settings/publish_plugins.py b/server_addon/flame/server/settings/publish_plugins.py index decb00fcfa..b34083b4e2 100644 --- a/server_addon/flame/server/settings/publish_plugins.py +++ b/server_addon/flame/server/settings/publish_plugins.py @@ -121,7 +121,7 @@ class IntegrateBatchGroupModel(BaseSettingsModel): ) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectTimelineInstances: CollectTimelineInstancesModel = SettingsField( default_factory=CollectTimelineInstancesModel, title="Collect Timeline Instances" diff --git a/server_addon/fusion/server/settings.py b/server_addon/fusion/server/settings.py index a913db16da..f16ae6e3e7 100644 --- a/server_addon/fusion/server/settings.py +++ b/server_addon/fusion/server/settings.py @@ -75,6 +75,12 @@ class HooksModel(BaseSettingsModel): default_factory=HookOptionalModel, title="Install PySide2" ) + FusionLaunchMenuHook: HookOptionalModel = SettingsField( + default_factory=HookOptionalModel, + title="Launch AYON Menu on Fusion Start", + description="Launch the AYON menu on Fusion application startup. 
" + "This is only supported for Fusion 18+" + ) class CreateSaverModel(CreateSaverPluginModel): @@ -143,6 +149,9 @@ DEFAULT_VALUES = { "hooks": { "InstallPySideToFusion": { "enabled": True + }, + "FusionLaunchMenuHook": { + "enabled": False } }, "create": { diff --git a/server_addon/fusion/server/version.py b/server_addon/fusion/server/version.py index bbab0242f6..1276d0254f 100644 --- a/server_addon/fusion/server/version.py +++ b/server_addon/fusion/server/version.py @@ -1 +1 @@ -__version__ = "0.1.4" +__version__ = "0.1.5" diff --git a/server_addon/hiero/server/settings/loader_plugins.py b/server_addon/hiero/server/settings/loader_plugins.py index b5a81d1ae2..682f9fd2d9 100644 --- a/server_addon/hiero/server/settings/loader_plugins.py +++ b/server_addon/hiero/server/settings/loader_plugins.py @@ -15,7 +15,7 @@ class LoadClipModel(BaseSettingsModel): ) -class LoaderPuginsModel(BaseSettingsModel): +class LoaderPluginsModel(BaseSettingsModel): LoadClip: LoadClipModel = SettingsField( default_factory=LoadClipModel, title="Load Clip" diff --git a/server_addon/hiero/server/settings/main.py b/server_addon/hiero/server/settings/main.py index b170ecafb8..378af6a539 100644 --- a/server_addon/hiero/server/settings/main.py +++ b/server_addon/hiero/server/settings/main.py @@ -9,11 +9,11 @@ from .create_plugins import ( DEFAULT_CREATE_SETTINGS ) from .loader_plugins import ( - LoaderPuginsModel, + LoaderPluginsModel, DEFAULT_LOADER_PLUGINS_SETTINGS ) from .publish_plugins import ( - PublishPuginsModel, + PublishPluginsModel, DEFAULT_PUBLISH_PLUGIN_SETTINGS ) from .scriptsmenu import ( @@ -35,12 +35,12 @@ class HieroSettings(BaseSettingsModel): default_factory=CreatorPluginsSettings, title="Creator Plugins", ) - load: LoaderPuginsModel = SettingsField( - default_factory=LoaderPuginsModel, + load: LoaderPluginsModel = SettingsField( + default_factory=LoaderPluginsModel, title="Loader plugins" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish plugins" ) scriptsmenu: ScriptsmenuSettings = SettingsField( diff --git a/server_addon/hiero/server/settings/publish_plugins.py b/server_addon/hiero/server/settings/publish_plugins.py index c35c61c332..0e43d4ce3a 100644 --- a/server_addon/hiero/server/settings/publish_plugins.py +++ b/server_addon/hiero/server/settings/publish_plugins.py @@ -49,7 +49,7 @@ class ExtractReviewCutUpVideoModel(BaseSettingsModel): ) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectInstanceVersion: CollectInstanceVersionModel = SettingsField( default_factory=CollectInstanceVersionModel, title="Collect Instance Version" diff --git a/server_addon/maya/server/settings/loaders.py b/server_addon/maya/server/settings/loaders.py index 418a7046ae..f59711b1e6 100644 --- a/server_addon/maya/server/settings/loaders.py +++ b/server_addon/maya/server/settings/loaders.py @@ -1,5 +1,9 @@ from ayon_server.settings import BaseSettingsModel, SettingsField -from ayon_server.types import ColorRGB_float, ColorRGBA_uint8 +from ayon_server.types import ColorRGBA_uint8 + + +class LoaderEnabledModel(BaseSettingsModel): + enabled: bool = SettingsField(title="Enabled") class ColorsSetting(BaseSettingsModel): @@ -94,6 +98,7 @@ class ReferenceLoaderModel(BaseSettingsModel): class ImportLoaderModel(BaseSettingsModel): + enabled: bool = SettingsField(title="Enabled") namespace: str = SettingsField(title="Namespace") group_name: str 
@@ -113,6 +118,89 @@ class LoadersModel(BaseSettingsModel):
         title="Import Loader"
     )

+    # Enable/disable loaders
+    ArnoldStandinLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Arnold Standin Loader"
+    )
+    AssemblyLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Assembly Loader"
+    )
+    AudioLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Audio Loader"
+    )
+    GpuCacheLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="GPU Cache Loader"
+    )
+    FileNodeLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="File Node (Image) Loader"
+    )
+    ImagePlaneLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Image Plane Loader"
+    )
+    LookLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Look Loader"
+    )
+    MatchmoveLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Matchmove Loader"
+    )
+    MultiverseUsdLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Multiverse USD Loader"
+    )
+    MultiverseUsdOverLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Multiverse USD Override Loader"
+    )
+    RedshiftProxyLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Redshift Proxy Loader"
+    )
+    RenderSetupLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Render Setup Loader"
+    )
+    LoadVDBtoArnold: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="VDB to Arnold Loader"
+    )
+    LoadVDBtoRedShift: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="VDB to Redshift Loader"
+    )
+    LoadVDBtoVRay: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="VDB to V-Ray Loader"
+    )
+    VRayProxyLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Vray Proxy Loader"
+    )
+    VRaySceneLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="VrayScene Loader"
+    )
+    XgenLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Xgen Loader"
+    )
+    YetiCacheLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Yeti Cache Loader"
+    )
+    YetiRigLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Yeti Rig Loader"
+    )
+
+
 DEFAULT_LOADERS_SETTING = {
     "colors": {
         "model": [209, 132, 30, 1.0],
@@ -154,8 +242,29 @@ DEFAULT_LOADERS_SETTING = {
         "display_handle": True
     },
     "import_loader": {
+        "enabled": True,
         "namespace": "{folder[name]}_{product[name]}_##_",
         "group_name": "_GRP",
         "display_handle": True
-    }
+    },
+    "ArnoldStandinLoader": {"enabled": True},
+    "AssemblyLoader": {"enabled": True},
+    "AudioLoader": {"enabled": True},
+    "FileNodeLoader": {"enabled": True},
+    "GpuCacheLoader": {"enabled": True},
+    "ImagePlaneLoader": {"enabled": True},
+    "LookLoader": {"enabled": True},
+    "MatchmoveLoader": {"enabled": True},
+    "MultiverseUsdLoader": {"enabled": True},
+    "MultiverseUsdOverLoader": {"enabled": True},
+    "RedshiftProxyLoader": {"enabled": True},
+    "RenderSetupLoader": {"enabled": True},
+    "LoadVDBtoArnold": {"enabled": True},
+    "LoadVDBtoRedShift": {"enabled": True},
+    "LoadVDBtoVRay": {"enabled": True},
+    "VRayProxyLoader": {"enabled": True},
+    "VRaySceneLoader": {"enabled": True},
+    "XgenLoader": {"enabled": True},
+    "YetiCacheLoader": {"enabled": True},
+    "YetiRigLoader": {"enabled": True},
 }
diff --git a/server_addon/maya/server/settings/publish_playblast.py b/server_addon/maya/server/settings/publish_playblast.py
index 39f48bacbe..d513a43e99 100644
--- a/server_addon/maya/server/settings/publish_playblast.py
+++ b/server_addon/maya/server/settings/publish_playblast.py
@@ -6,7 +6,7 @@ from ayon_server.settings import (
     ensure_unique_names,
     task_types_enum,
 )
-from ayon_server.types import ColorRGBA_uint8, ColorRGB_float
+from ayon_server.types import ColorRGBA_uint8


 def hardware_falloff_enum():
diff --git a/server_addon/maya/server/settings/publishers.py b/server_addon/maya/server/settings/publishers.py
index f1e63f36be..27288053a2 100644
--- a/server_addon/maya/server/settings/publishers.py
+++ b/server_addon/maya/server/settings/publishers.py
@@ -316,6 +316,12 @@ class ExtractObjModel(BaseSettingsModel):
     optional: bool = SettingsField(title="Optional")


+class ExtractModelModel(BaseSettingsModel):
+    enabled: bool = SettingsField(title="Enabled")
+    optional: bool = SettingsField(title="Optional")
+    active: bool = SettingsField(title="Active")
+
+
 class ExtractMayaSceneRawModel(BaseSettingsModel):
     """Add loaded instances to those published families:"""
     enabled: bool = SettingsField(title="ExtractMayaSceneRaw")
@@ -372,7 +378,9 @@ class ExtractLookModel(BaseSettingsModel):


 class ExtractGPUCacheModel(BaseSettingsModel):
-    enabled: bool = True
+    enabled: bool = SettingsField(title="Enabled")
+    optional: bool = SettingsField(title="Optional")
+    active: bool = SettingsField(title="Active")
     families: list[str] = SettingsField(default_factory=list, title="Families")
     step: float = SettingsField(1.0, ge=1.0, title="Step")
     stepSave: int = SettingsField(1, ge=1, title="Step Save")
@@ -799,6 +807,10 @@ class PublishersModel(BaseSettingsModel):
         default_factory=ExtractGPUCacheModel,
         title="Extract GPU Cache",
     )
+    ExtractModel: ExtractModelModel = SettingsField(
+        default_factory=ExtractModelModel,
+        title="Extract Model (Maya Scene)"
+    )


 DEFAULT_SUFFIX_NAMING = {
@@ -1341,6 +1353,8 @@ DEFAULT_PUBLISH_SETTINGS = {
     },
     "ExtractGPUCache": {
         "enabled": False,
+        "optional": False,
+        "active": True,
         "families": [
             "model",
             "animation",
@@ -1353,5 +1367,10 @@ DEFAULT_PUBLISH_SETTINGS = {
         "optimizeAnimationsForMotionBlur": True,
         "writeMaterials": True,
         "useBaseTessellation": True
+    },
+    "ExtractModel": {
+        "enabled": True,
+        "optional": True,
+        "active": True,
     }
 }
diff --git a/server_addon/maya/server/settings/scriptsmenu.py b/server_addon/maya/server/settings/scriptsmenu.py
index d01dff1621..7b0ba7d831 100644
--- a/server_addon/maya/server/settings/scriptsmenu.py
+++ b/server_addon/maya/server/settings/scriptsmenu.py
@@ -1,3 +1,7 @@
+import json
+
+from pydantic import validator
+from ayon_server.exceptions import BadRequestException
 from ayon_server.settings import BaseSettingsModel, SettingsField


@@ -14,19 +18,60 @@ class ScriptsmenuSubmodel(BaseSettingsModel):
     )


+_definition_mode_type = [
+    {"value": "definition", "label": "Menu Builder"},
+    {"value": "definition_json", "label": "Raw JSON (advanced)"}
+]
+
+
 class ScriptsmenuModel(BaseSettingsModel):
+    """Add a custom scripts menu to Maya"""
     _isGroup = True

     name: str = SettingsField(title="Menu Name")
+
+    definition_type: str = SettingsField(
+        title="Define menu using",
+        description="Choose the way to define the custom scripts menu "
+                    "via settings",
+        enum_resolver=lambda: _definition_mode_type,
+        conditionalEnum=True,
+        default="definition"
+    )
     definition: list[ScriptsmenuSubmodel] = SettingsField(
         default_factory=list,
         title="Menu Definition",
         description="Scriptmenu Items Definition"
     )
+    definition_json: str = SettingsField(
+        "[]", title="Menu Definition JSON", widget="textarea",
+        description=(
+            "Define the custom tools menu using a JSON list. "
+            "For more details on the JSON format, see "
+            "[here](https://github.com/Colorbleed/scriptsmenu?tab=readme-ov-file#configuration)."  # noqa: E501
+        )
+    )
+
+    @validator("definition_json")
+    def validate_json(cls, value):
+        if not value.strip():
+            return "[]"
+        try:
+            converted_value = json.loads(value)
+            success = isinstance(converted_value, list)
+        except json.JSONDecodeError:
+            success = False
+
+        if not success:
+            raise BadRequestException(
+                "The definition can't be parsed as json list object"
+            )
+        return value


 DEFAULT_SCRIPTSMENU_SETTINGS = {
     "name": "Custom Tools",
+    "definition_type": "definition",
     "definition": [
         {
             "type": "action",
@@ -39,5 +84,6 @@ DEFAULT_SCRIPTSMENU_SETTINGS = {
             "shader"
         ]
     }
-    ]
+    ],
+    "definition_json": "[]"
 }
diff --git a/server_addon/maya/server/version.py b/server_addon/maya/server/version.py
index 71b4bc4ca6..75b463f198 100644
--- a/server_addon/maya/server/version.py
+++ b/server_addon/maya/server/version.py
@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
 """Package declaring addon version."""
-__version__ = "0.1.12"
+__version__ = "0.1.15"
diff --git a/server_addon/nuke/server/settings/loader_plugins.py b/server_addon/nuke/server/settings/loader_plugins.py
index a5c3315fd4..531ea8d986 100644
--- a/server_addon/nuke/server/settings/loader_plugins.py
+++ b/server_addon/nuke/server/settings/loader_plugins.py
@@ -42,7 +42,7 @@ class LoadClipModel(BaseSettingsModel):
     )


-class LoaderPuginsModel(BaseSettingsModel):
+class LoaderPluginsModel(BaseSettingsModel):
     LoadImage: LoadImageModel = SettingsField(
         default_factory=LoadImageModel,
         title="Load Image"
diff --git a/server_addon/nuke/server/settings/main.py b/server_addon/nuke/server/settings/main.py
index 936686d6ce..1fd347cc21 100644
--- a/server_addon/nuke/server/settings/main.py
+++ b/server_addon/nuke/server/settings/main.py
@@ -28,11 +28,11 @@ from .create_plugins import (
     DEFAULT_CREATE_SETTINGS
 )
 from .publish_plugins import (
-    PublishPuginsModel,
+    PublishPluginsModel,
     DEFAULT_PUBLISH_PLUGIN_SETTINGS
 )
 from .loader_plugins import (
-    LoaderPuginsModel,
+    LoaderPluginsModel,
     DEFAULT_LOADER_PLUGINS_SETTINGS
 )
 from .workfile_builder import (
@@ -75,13 +75,13 @@ class NukeSettings(BaseSettingsModel):
         title="Creator Plugins",
     )

-    publish: PublishPuginsModel = SettingsField(
-        default_factory=PublishPuginsModel,
+    publish: PublishPluginsModel = SettingsField(
+        default_factory=PublishPluginsModel,
         title="Publish Plugins",
     )

-    load: LoaderPuginsModel = SettingsField(
-        default_factory=LoaderPuginsModel,
+    load: LoaderPluginsModel = SettingsField(
+        default_factory=LoaderPluginsModel,
         title="Loader Plugins",
     )
diff --git a/server_addon/nuke/server/settings/publish_plugins.py b/server_addon/nuke/server/settings/publish_plugins.py
index 7d9c914fee..d5b05d8715 100644
--- a/server_addon/nuke/server/settings/publish_plugins.py
+++ b/server_addon/nuke/server/settings/publish_plugins.py
@@ -219,7 +219,7 @@ class IncrementScriptVersionModel(BaseSettingsModel):
     active: bool = SettingsField(title="Active")


-class PublishPuginsModel(BaseSettingsModel):
+class PublishPluginsModel(BaseSettingsModel):
     CollectInstanceData: CollectInstanceDataModel = SettingsField(
         title="Collect Instance Version",
         default_factory=CollectInstanceDataModel,
diff --git a/server_addon/resolve/server/settings.py b/server_addon/resolve/server/settings.py
index dcdb2f1b27..d9cbb98340 100644
--- a/server_addon/resolve/server/settings.py
+++ b/server_addon/resolve/server/settings.py
@@ -69,7 +69,7 @@ class CreateShotClipModels(BaseSettingsModel):
     )


-class CreatorPuginsModel(BaseSettingsModel):
+class CreatorPluginsModel(BaseSettingsModel):
     CreateShotClip: CreateShotClipModels = SettingsField(
         default_factory=CreateShotClipModels,
         title="Create Shot Clip"
@@ -84,8 +84,8 @@ class ResolveSettings(BaseSettingsModel):
         default_factory=ResolveImageIOModel,
         title="Color Management (ImageIO)"
     )
-    create: CreatorPuginsModel = SettingsField(
-        default_factory=CreatorPuginsModel,
+    create: CreatorPluginsModel = SettingsField(
+        default_factory=CreatorPluginsModel,
         title="Creator plugins",
     )
diff --git a/server_addon/traypublisher/server/settings/simple_creators.py b/server_addon/traypublisher/server/settings/simple_creators.py
index 924eeedd23..6b979bbe52 100644
--- a/server_addon/traypublisher/server/settings/simple_creators.py
+++ b/server_addon/traypublisher/server/settings/simple_creators.py
@@ -142,6 +142,7 @@ DEFAULT_SIMPLE_CREATORS = [
         "extensions": [
            ".exr",
            ".png",
+           ".dng",
            ".dpx",
            ".jpg",
            ".tiff",
@@ -165,6 +166,7 @@ DEFAULT_SIMPLE_CREATORS = [
         "extensions": [
            ".exr",
            ".png",
+           ".dng",
            ".dpx",
            ".jpg",
            ".jpeg",
@@ -215,6 +217,7 @@ DEFAULT_SIMPLE_CREATORS = [
            ".exr",
            ".jpg",
            ".jpeg",
+           ".dng",
            ".dpx",
            ".bmp",
            ".tif",
diff --git a/server_addon/traypublisher/server/version.py b/server_addon/traypublisher/server/version.py
index e57ad00718..de699158fd 100644
--- a/server_addon/traypublisher/server/version.py
+++ b/server_addon/traypublisher/server/version.py
@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
 """Package declaring addon version."""
-__version__ = "0.1.3"
+__version__ = "0.1.4"
diff --git a/server_addon/tvpaint/server/settings/publish_plugins.py b/server_addon/tvpaint/server/settings/publish_plugins.py
index 0d978e5714..db1c7bd11a 100644
--- a/server_addon/tvpaint/server/settings/publish_plugins.py
+++ b/server_addon/tvpaint/server/settings/publish_plugins.py
@@ -1,5 +1,5 @@
 from ayon_server.settings import BaseSettingsModel, SettingsField
-from ayon_server.types import ColorRGBA_uint8, ColorRGB_uint8
+from ayon_server.types import ColorRGBA_uint8


 class CollectRenderInstancesModel(BaseSettingsModel):