diff --git a/client/ayon_core/__init__.py b/client/ayon_core/__init__.py index 7d95587e8a..ce5a28601c 100644 --- a/client/ayon_core/__init__.py +++ b/client/ayon_core/__init__.py @@ -14,3 +14,15 @@ AYON_SERVER_ENABLED = True # Indicate if AYON entities should be used instead of OpenPype entities USE_AYON_ENTITIES = True # ------------------------- + + +__all__ = ( + "__version__", + + # Deprecated + "AYON_CORE_ROOT", + "PACKAGE_DIR", + "PLUGINS_DIR", + "AYON_SERVER_ENABLED", + "USE_AYON_ENTITIES", +) diff --git a/client/ayon_core/addon/base.py b/client/ayon_core/addon/base.py index 42b53c59e3..6ef838652e 100644 --- a/client/ayon_core/addon/base.py +++ b/client/ayon_core/addon/base.py @@ -16,6 +16,7 @@ import six import appdirs import ayon_api +from ayon_core import AYON_CORE_ROOT from ayon_core.lib import Logger, is_dev_mode_enabled from ayon_core.settings import get_studio_settings @@ -335,14 +336,70 @@ def _load_ayon_addons(openpype_modules, modules_key, log): return addons_to_skip_in_core +def _load_ayon_core_addons_dir( + ignore_addon_names, openpype_modules, modules_key, log +): + addons_dir = os.path.join(AYON_CORE_ROOT, "addons") + if not os.path.exists(addons_dir): + return + + imported_modules = [] + + # Make sure that addons which already have client code are not loaded + # from core again, with older code + filtered_paths = [] + for name in os.listdir(addons_dir): + if name in ignore_addon_names: + continue + path = os.path.join(addons_dir, name) + if os.path.isdir(path): + filtered_paths.append(path) + + for path in filtered_paths: + while path in sys.path: + sys.path.remove(path) + sys.path.insert(0, path) + + for name in os.listdir(path): + fullpath = os.path.join(path, name) + if os.path.isfile(fullpath): + basename, ext = os.path.splitext(name) + if ext != ".py": + continue + else: + basename = name + try: + module = __import__(basename, fromlist=("",)) + for attr_name in dir(module): + attr = getattr(module, attr_name) + if ( + inspect.isclass(attr) + and issubclass(attr, AYONAddon) + ): + new_import_str = "{}.{}".format(modules_key, basename) + sys.modules[new_import_str] = module + setattr(openpype_modules, basename, module) + imported_modules.append(module) + break + + except Exception: + log.error( + "Failed to import addon '{}'.".format(fullpath), + exc_info=True + ) + return imported_modules + + def _load_addons_in_core( ignore_addon_names, openpype_modules, modules_key, log ): + _load_ayon_core_addons_dir( + ignore_addon_names, openpype_modules, modules_key, log + ) # Add current directory at first place # - has small differences in import logic - current_dir = os.path.abspath(os.path.dirname(__file__)) - hosts_dir = os.path.join(os.path.dirname(current_dir), "hosts") - modules_dir = os.path.join(os.path.dirname(current_dir), "modules") + hosts_dir = os.path.join(AYON_CORE_ROOT, "hosts") + modules_dir = os.path.join(AYON_CORE_ROOT, "modules") ignored_host_names = set(IGNORED_HOSTS_IN_AYON) ignored_module_dir_filenames = ( @@ -1075,7 +1132,7 @@ class AddonsManager: """Print out report of time spent on addons initialization parts. Reporting is not automated must be implemented for each initialization - part separatelly. Reports must be stored to `_report` attribute. + part separately. Reports must be stored to `_report` attribute. Print is skipped if `_report` is empty. 
Attribute `_report` is dictionary where key is "label" describing @@ -1267,7 +1324,7 @@ class TrayAddonsManager(AddonsManager): def add_doubleclick_callback(self, addon, callback): """Register doubleclick callbacks on tray icon. - Currently there is no way how to determine which is launched. Name of + Currently, there is no way how to determine which is launched. Name of callback can be defined with `doubleclick_callback` attribute. Missing feature how to define default callback. diff --git a/client/ayon_core/addons/applications/ayon_applications/__init__.py b/client/ayon_core/addons/applications/ayon_applications/__init__.py new file mode 100644 index 0000000000..b4a50279ab --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/__init__.py @@ -0,0 +1,58 @@ +from .constants import ( + APPLICATIONS_ADDON_ROOT, + DEFAULT_ENV_SUBGROUP, + PLATFORM_NAMES, +) +from .exceptions import ( + ApplicationNotFound, + ApplicationExecutableNotFound, + ApplicationLaunchFailed, + MissingRequiredKey, +) +from .defs import ( + LaunchTypes, + ApplicationExecutable, + UndefinedApplicationExecutable, + ApplicationGroup, + Application, + EnvironmentToolGroup, + EnvironmentTool, +) +from .hooks import ( + LaunchHook, + PreLaunchHook, + PostLaunchHook, +) +from .manager import ( + ApplicationManager, + ApplicationLaunchContext, +) +from .addon import ApplicationsAddon + + +__all__ = ( + "DEFAULT_ENV_SUBGROUP", + "PLATFORM_NAMES", + + "ApplicationNotFound", + "ApplicationExecutableNotFound", + "ApplicationLaunchFailed", + "MissingRequiredKey", + + "LaunchTypes", + "ApplicationExecutable", + "UndefinedApplicationExecutable", + "ApplicationGroup", + "Application", + "EnvironmentToolGroup", + "EnvironmentTool", + + "LaunchHook", + "PreLaunchHook", + "PostLaunchHook", + + "ApplicationManager", + "ApplicationLaunchContext", + + "ApplicationsAddon", +) diff --git a/client/ayon_core/addons/applications/ayon_applications/addon.py b/client/ayon_core/addons/applications/ayon_applications/addon.py new file mode 100644 index 0000000000..0f1b68af0e --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/addon.py @@ -0,0 +1,173 @@ +import os +import json + +from ayon_core.addon import AYONAddon, IPluginPaths, click_wrap + +from .constants import APPLICATIONS_ADDON_ROOT +from .defs import LaunchTypes +from .manager import ApplicationManager + + +class ApplicationsAddon(AYONAddon, IPluginPaths): + name = "applications" + + def initialize(self, settings): + # TODO remove when addon is removed from ayon-core + self.enabled = self.name in settings + + def get_app_environments_for_context( + self, + project_name, + folder_path, + task_name, + full_app_name, + env_group=None, + launch_type=None, + env=None, + ): + """Calculate environment variables for launch context. + + Args: + project_name (str): Project name. + folder_path (str): Folder path. + task_name (str): Task name. + full_app_name (str): Full application name. + env_group (Optional[str]): Environment group. + launch_type (Optional[str]): Launch type. + env (Optional[dict[str, str]]): Environment variables to update. + + Returns: + dict[str, str]: Environment variables for context. 
+ + """ + from ayon_applications.utils import get_app_environments_for_context + + if not full_app_name: + return {} + + return get_app_environments_for_context( + project_name, + folder_path, + task_name, + full_app_name, + env_group=env_group, + launch_type=launch_type, + env=env, + addons_manager=self.manager + ) + + def get_farm_publish_environment_variables( + self, + project_name, + folder_path, + task_name, + full_app_name=None, + env_group=None, + ): + """Calculate environment variables for farm publish. + + Args: + project_name (str): Project name. + folder_path (str): Folder path. + task_name (str): Task name. + env_group (Optional[str]): Environment group. + full_app_name (Optional[str]): Full application name. Value from + environment variable 'AYON_APP_NAME' is used if 'None' is + passed. + + Returns: + dict[str, str]: Environment variables for farm publish. + + """ + if full_app_name is None: + full_app_name = os.getenv("AYON_APP_NAME") + + return self.get_app_environments_for_context( + project_name, + folder_path, + task_name, + full_app_name, + env_group=env_group, + launch_type=LaunchTypes.farm_publish + ) + + def get_applications_manager(self, settings=None): + """Get applications manager. + + Args: + settings (Optional[dict]): Studio/project settings. + + Returns: + ApplicationManager: Applications manager. + + """ + return ApplicationManager(settings) + + def get_plugin_paths(self): + return { + "publish": [ + os.path.join(APPLICATIONS_ADDON_ROOT, "plugins", "publish") + ] + } + + # --- CLI --- + def cli(self, addon_click_group): + main_group = click_wrap.group( + self._cli_main, name=self.name, help="Applications addon" + ) + ( + main_group.command( + self._cli_extract_environments, + name="extractenvironments", + help=( + "Extract environment variables for context into json file" + ) + ) + .argument("output_json_path") + .option("--project", help="Project name", default=None) + .option("--folder", help="Folder path", default=None) + .option("--task", help="Task name", default=None) + .option("--app", help="Application name", default=None) + .option( + "--envgroup", + help="Environment group (e.g. \"farm\")", + default=None + ) + ) + # Convert main command to click object and add it to parent group + addon_click_group.add_command( + main_group.to_click_obj() + ) + + def _cli_main(self): + pass + + def _cli_extract_environments( + self, output_json_path, project, folder, task, app, envgroup + ): + """Produces json file with environment based on project and app. + + Called by farm integration to propagate environment into farm jobs. + + Args: + output_json_path (str): Output json file path. + project (str): Project name. + folder (str): Folder path. + task (str): Task name. + app (str): Full application name e.g. 'maya/2024'. + envgroup (str): Environment group. 
+ + """ + if all((project, folder, task, app)): + env = self.get_farm_publish_environment_variables( + project, folder, task, app, env_group=envgroup, + ) + else: + env = os.environ.copy() + + output_dir = os.path.dirname(output_json_path) + if not os.path.exists(output_dir): + os.makedirs(output_dir) + + with open(output_json_path, "w") as file_stream: + json.dump(env, file_stream, indent=4) diff --git a/client/ayon_core/addons/applications/ayon_applications/constants.py b/client/ayon_core/addons/applications/ayon_applications/constants.py new file mode 100644 index 0000000000..92c8f4f254 --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/constants.py @@ -0,0 +1,6 @@ +import os + +APPLICATIONS_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__)) + +PLATFORM_NAMES = {"windows", "linux", "darwin"} +DEFAULT_ENV_SUBGROUP = "standard" diff --git a/client/ayon_core/addons/applications/ayon_applications/defs.py b/client/ayon_core/addons/applications/ayon_applications/defs.py new file mode 100644 index 0000000000..5cc36041a1 --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/defs.py @@ -0,0 +1,404 @@ +import os +import platform +import json +import copy + +from ayon_core.lib import find_executable + + +class LaunchTypes: + """Launch types are filters for pre/post-launch hooks. + + Please use these variables in case they'll change values. + """ + + # Local launch - application is launched on local machine + local = "local" + # Farm render job - application is on farm + farm_render = "farm-render" + # Farm publish job - integration post-render job + farm_publish = "farm-publish" + # Remote launch - application is launched on remote machine from which + # can be started publishing + remote = "remote" + # Automated launch - application is launched with automated publishing + automated = "automated" + + +class ApplicationExecutable: + """Representation of executable loaded from settings.""" + + def __init__(self, executable): + # Try to format executable with environments + try: + executable = executable.format(**os.environ) + except Exception: + pass + + # On MacOS check if exists path to executable when ends with `.app` + # - it is common that path will lead to "/Applications/Blender" but + # real path is "/Applications/Blender.app" + if platform.system().lower() == "darwin": + executable = self.macos_executable_prep(executable) + + self.executable_path = executable + + def __str__(self): + return self.executable_path + + def __repr__(self): + return "<{}> {}".format(self.__class__.__name__, self.executable_path) + + @staticmethod + def macos_executable_prep(executable): + """Try to find full path to executable file. + + Real executable is stored in '*.app/Contents/MacOS/'. + + Having path to '*.app' gives ability to read it's plist info and + use "CFBundleExecutable" key from plist to know what is "executable." + + Plist is stored in '*.app/Contents/Info.plist'. + + This is because some '*.app' directories don't have same permissions + as real executable. 
+ """ + # Try to find if there is `.app` file + if not os.path.exists(executable): + _executable = executable + ".app" + if os.path.exists(_executable): + executable = _executable + + # Try to find real executable if executable has `Contents` subfolder + contents_dir = os.path.join(executable, "Contents") + if os.path.exists(contents_dir): + executable_filename = None + # Load plist file and check for bundle executable + plist_filepath = os.path.join(contents_dir, "Info.plist") + if os.path.exists(plist_filepath): + import plistlib + + if hasattr(plistlib, "load"): + with open(plist_filepath, "rb") as stream: + parsed_plist = plistlib.load(stream) + else: + parsed_plist = plistlib.readPlist(plist_filepath) + executable_filename = parsed_plist.get("CFBundleExecutable") + + if executable_filename: + executable = os.path.join( + contents_dir, "MacOS", executable_filename + ) + + return executable + + def as_args(self): + return [self.executable_path] + + def _realpath(self): + """Check if path is valid executable path.""" + # Check for executable in PATH + result = find_executable(self.executable_path) + if result is not None: + return result + + # This is not 100% validation but it is better than remove ability to + # launch .bat, .sh or extentionless files + if os.path.exists(self.executable_path): + return self.executable_path + return None + + def exists(self): + if not self.executable_path: + return False + return bool(self._realpath()) + + +class UndefinedApplicationExecutable(ApplicationExecutable): + """Some applications do not require executable path from settings. + + In that case this class is used to "fake" existing executable. + """ + def __init__(self): + pass + + def __str__(self): + return self.__class__.__name__ + + def __repr__(self): + return "<{}>".format(self.__class__.__name__) + + def as_args(self): + return [] + + def exists(self): + return True + + +class ApplicationGroup: + """Hold information about application group. + + Application group wraps different versions(variants) of application. + e.g. "maya" is group and "maya_2020" is variant. + + Group hold `host_name` which is implementation name used in AYON. Also + holds `enabled` if whole app group is enabled or `icon` for application + icon path in resources. + + Group has also `environment` which hold same environments for all variants. + + Args: + name (str): Groups' name. + data (dict): Group defying data loaded from settings. + manager (ApplicationManager): Manager that created the group. + """ + + def __init__(self, name, data, manager): + self.name = name + self.manager = manager + self._data = data + + self.enabled = data["enabled"] + self.label = data["label"] or None + self.icon = data["icon"] or None + env = {} + try: + env = json.loads(data["environment"]) + except Exception: + pass + self._environment = env + + host_name = data["host_name"] or None + self.is_host = host_name is not None + self.host_name = host_name + + settings_variants = data["variants"] + variants = {} + for variant_data in settings_variants: + app_variant = Application(variant_data, self) + variants[app_variant.name] = app_variant + + self.variants = variants + + def __repr__(self): + return "<{}> - {}".format(self.__class__.__name__, self.name) + + def __iter__(self): + for variant in self.variants.values(): + yield variant + + @property + def environment(self): + return copy.deepcopy(self._environment) + + +class Application: + """Hold information about application. + + Object by itself does nothing special. 
+ + Args: + data (dict): Data for the version containing information about + executables, variant label or if is enabled. + Only required key is `executables`. + group (ApplicationGroup): App group object that created the application + and under which application belongs. + + """ + def __init__(self, data, group): + self._data = data + name = data["name"] + label = data["label"] or name + enabled = False + if group.enabled: + enabled = data.get("enabled", True) + + if group.label: + full_label = " ".join((group.label, label)) + else: + full_label = label + env = {} + try: + env = json.loads(data["environment"]) + except Exception: + pass + + arguments = data["arguments"] + if isinstance(arguments, dict): + arguments = arguments.get(platform.system().lower()) + + if not arguments: + arguments = [] + + _executables = data["executables"].get(platform.system().lower(), []) + executables = [ + ApplicationExecutable(executable) + for executable in _executables + ] + + self.group = group + + self.name = name + self.label = label + self.enabled = enabled + self.use_python_2 = data.get("use_python_2", False) + + self.full_name = "/".join((group.name, name)) + self.full_label = full_label + self.arguments = arguments + self.executables = executables + self._environment = env + + def __repr__(self): + return "<{}> - {}".format(self.__class__.__name__, self.full_name) + + @property + def environment(self): + return copy.deepcopy(self._environment) + + @property + def manager(self): + return self.group.manager + + @property + def host_name(self): + return self.group.host_name + + @property + def icon(self): + return self.group.icon + + @property + def is_host(self): + return self.group.is_host + + def find_executable(self): + """Try to find existing executable for application. + + Returns (str): Path to executable from `executables` or None if any + exists. + """ + for executable in self.executables: + if executable.exists(): + return executable + return None + + def launch(self, *args, **kwargs): + """Launch the application. + + For this purpose is used manager's launch method to keep logic at one + place. + + Arguments must match with manager's launch method. That's why *args + **kwargs are used. + + Returns: + subprocess.Popen: Return executed process as Popen object. + """ + return self.manager.launch(self.full_name, *args, **kwargs) + + +class EnvironmentToolGroup: + """Hold information about environment tool group. + + Environment tool group may hold different variants of same tool and set + environments that are same for all of them. + + e.g. "mtoa" may have different versions but all environments except one + are same. + + Args: + data (dict): Group information with variants. + manager (ApplicationManager): Manager that creates the group. 
+ """ + + def __init__(self, data, manager): + name = data["name"] + label = data["label"] + + self.name = name + self.label = label + self._data = data + self.manager = manager + + environment = {} + try: + environment = json.loads(data["environment"]) + except Exception: + pass + self._environment = environment + + variants = data.get("variants") or [] + variants_by_name = {} + for variant_data in variants: + tool = EnvironmentTool(variant_data, self) + variants_by_name[tool.name] = tool + self.variants = variants_by_name + + def __repr__(self): + return "<{}> - {}".format(self.__class__.__name__, self.name) + + def __iter__(self): + for variant in self.variants.values(): + yield variant + + @property + def environment(self): + return copy.deepcopy(self._environment) + + +class EnvironmentTool: + """Hold information about application tool. + + Structure of tool information. + + Args: + variant_data (dict): Variant data with environments and + host and app variant filters. + group (EnvironmentToolGroup): Name of group which wraps tool. + """ + + def __init__(self, variant_data, group): + # Backwards compatibility 3.9.1 - 3.9.2 + # - 'variant_data' contained only environments but contain also host + # and application variant filters + name = variant_data["name"] + label = variant_data["label"] + host_names = variant_data["host_names"] + app_variants = variant_data["app_variants"] + + environment = {} + try: + environment = json.loads(variant_data["environment"]) + except Exception: + pass + + self.host_names = host_names + self.app_variants = app_variants + self.name = name + self.variant_label = label + self.label = " ".join((group.label, label)) + self.group = group + + self._environment = environment + self.full_name = "/".join((group.name, name)) + + def __repr__(self): + return "<{}> - {}".format(self.__class__.__name__, self.full_name) + + @property + def environment(self): + return copy.deepcopy(self._environment) + + def is_valid_for_app(self, app): + """Is tool valid for application. + + Args: + app (Application): Application for which are prepared environments. + """ + if self.app_variants and app.full_name not in self.app_variants: + return False + + if self.host_names and app.host_name not in self.host_names: + return False + return True diff --git a/client/ayon_core/addons/applications/ayon_applications/exceptions.py b/client/ayon_core/addons/applications/ayon_applications/exceptions.py new file mode 100644 index 0000000000..d5a48d3b6b --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/exceptions.py @@ -0,0 +1,50 @@ +class ApplicationNotFound(Exception): + """Application was not found in ApplicationManager by name.""" + + def __init__(self, app_name): + self.app_name = app_name + super(ApplicationNotFound, self).__init__( + "Application \"{}\" was not found.".format(app_name) + ) + + +class ApplicationExecutableNotFound(Exception): + """Defined executable paths are not available on the machine.""" + + def __init__(self, application): + self.application = application + details = None + if not application.executables: + msg = ( + "Executable paths for application \"{}\"({}) are not set." + ) + else: + msg = ( + "Defined executable paths for application \"{}\"({})" + " are not available on this machine." 
+ ) + details = "Defined paths:" + for executable in application.executables: + details += "\n- " + executable.executable_path + + self.msg = msg.format(application.full_label, application.full_name) + self.details = details + + exc_mgs = str(self.msg) + if details: + # Is good idea to pass new line symbol to exception message? + exc_mgs += "\n" + details + self.exc_msg = exc_mgs + super(ApplicationExecutableNotFound, self).__init__(exc_mgs) + + +class ApplicationLaunchFailed(Exception): + """Application launch failed due to known reason. + + Message should be self explanatory as traceback won't be shown. + """ + pass + + +class MissingRequiredKey(KeyError): + pass diff --git a/client/ayon_core/addons/applications/ayon_applications/hooks.py b/client/ayon_core/addons/applications/ayon_applications/hooks.py new file mode 100644 index 0000000000..6aa12a210a --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/hooks.py @@ -0,0 +1,150 @@ +import platform +from abc import ABCMeta, abstractmethod + +import six + +from ayon_core.lib import Logger + +from .defs import LaunchTypes + + +@six.add_metaclass(ABCMeta) +class LaunchHook: + """Abstract base class of launch hook.""" + # Order of prelaunch hook, will be executed as last if set to None. + order = None + # List of host implementations, skipped if empty. + hosts = set() + # Set of application groups + app_groups = set() + # Set of specific application names + app_names = set() + # Set of platform availability + platforms = set() + # Set of launch types for which is available + # - if empty then is available for all launch types + # - by default has 'local' which is most common reason for launc hooks + launch_types = {LaunchTypes.local} + + def __init__(self, launch_context): + """Constructor of launch hook. + + Always should be called + """ + self.log = Logger.get_logger(self.__class__.__name__) + + self.launch_context = launch_context + + is_valid = self.class_validation(launch_context) + if is_valid: + is_valid = self.validate() + + self.is_valid = is_valid + + @classmethod + def class_validation(cls, launch_context): + """Validation of class attributes by launch context. + + Args: + launch_context (ApplicationLaunchContext): Context of launching + application. + + Returns: + bool: Is launch hook valid for the context by class attributes. 
+ """ + if cls.platforms: + low_platforms = tuple( + _platform.lower() + for _platform in cls.platforms + ) + if platform.system().lower() not in low_platforms: + return False + + if cls.hosts: + if launch_context.host_name not in cls.hosts: + return False + + if cls.app_groups: + if launch_context.app_group.name not in cls.app_groups: + return False + + if cls.app_names: + if launch_context.app_name not in cls.app_names: + return False + + if cls.launch_types: + if launch_context.launch_type not in cls.launch_types: + return False + + return True + + @property + def data(self): + return self.launch_context.data + + @property + def application(self): + return getattr(self.launch_context, "application", None) + + @property + def manager(self): + return getattr(self.application, "manager", None) + + @property + def host_name(self): + return getattr(self.application, "host_name", None) + + @property + def app_group(self): + return getattr(self.application, "group", None) + + @property + def app_name(self): + return getattr(self.application, "full_name", None) + + @property + def addons_manager(self): + return getattr(self.launch_context, "addons_manager", None) + + @property + def modules_manager(self): + """ + Deprecated: + Use 'addons_wrapper' instead. + """ + return self.addons_manager + + def validate(self): + """Optional validation of launch hook on initialization. + + Returns: + bool: Hook is valid (True) or invalid (False). + """ + # QUESTION Not sure if this method has any usable potential. + # - maybe result can be based on settings + return True + + @abstractmethod + def execute(self, *args, **kwargs): + """Abstract execute method where logic of hook is.""" + pass + + +class PreLaunchHook(LaunchHook): + """Abstract class of prelaunch hook. + + This launch hook will be processed before application is launched. + + If any exception will happen during processing the application won't be + launched. + """ + + +class PostLaunchHook(LaunchHook): + """Abstract class of postlaunch hook. + + This launch hook will be processed after application is launched. + + Nothing will happen if any exception will happen during processing. And + processing of other postlaunch hooks won't stop either. + """ diff --git a/client/ayon_core/addons/applications/ayon_applications/manager.py b/client/ayon_core/addons/applications/ayon_applications/manager.py new file mode 100644 index 0000000000..dca2ff4491 --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/manager.py @@ -0,0 +1,676 @@ +import os +import sys +import copy +import json +import tempfile +import platform +import inspect +import subprocess + +import six + +from ayon_core import AYON_CORE_ROOT +from ayon_core.settings import get_studio_settings +from ayon_core.lib import ( + Logger, + modules_from_path, + classes_from_module, + get_linux_launcher_args, +) +from ayon_core.addon import AddonsManager + +from .constants import DEFAULT_ENV_SUBGROUP +from .exceptions import ( + ApplicationNotFound, + ApplicationExecutableNotFound, +) +from .hooks import PostLaunchHook, PreLaunchHook +from .defs import EnvironmentToolGroup, ApplicationGroup, LaunchTypes + + +class ApplicationManager: + """Load applications and tools and store them by their full name. + + Args: + studio_settings (dict): Preloaded studio settings. When passed manager + will always use these values. Gives ability to create manager + using different settings. 
+ """ + + def __init__(self, studio_settings=None): + self.log = Logger.get_logger(self.__class__.__name__) + + self.app_groups = {} + self.applications = {} + self.tool_groups = {} + self.tools = {} + + self._studio_settings = studio_settings + + self.refresh() + + def set_studio_settings(self, studio_settings): + """Ability to change init system settings. + + This will trigger refresh of manager. + """ + self._studio_settings = studio_settings + + self.refresh() + + def refresh(self): + """Refresh applications from settings.""" + self.app_groups.clear() + self.applications.clear() + self.tool_groups.clear() + self.tools.clear() + + if self._studio_settings is not None: + settings = copy.deepcopy(self._studio_settings) + else: + settings = get_studio_settings( + clear_metadata=False, exclude_locals=False + ) + + applications_addon_settings = settings["applications"] + + # Prepare known applications + app_defs = applications_addon_settings["applications"] + additional_apps = app_defs.pop("additional_apps") + for additional_app in additional_apps: + app_name = additional_app.pop("name") + if app_name in app_defs: + self.log.warning(( + "Additional application '{}' is already" + " in built-in applications." + ).format(app_name)) + app_defs[app_name] = additional_app + + for group_name, variant_defs in app_defs.items(): + group = ApplicationGroup(group_name, variant_defs, self) + self.app_groups[group_name] = group + for app in group: + self.applications[app.full_name] = app + + tools_definitions = applications_addon_settings["tool_groups"] + for tool_group_data in tools_definitions: + group = EnvironmentToolGroup(tool_group_data, self) + self.tool_groups[group.name] = group + for tool in group: + self.tools[tool.full_name] = tool + + def find_latest_available_variant_for_group(self, group_name): + group = self.app_groups.get(group_name) + if group is None or not group.enabled: + return None + + output = None + for _, variant in reversed(sorted(group.variants.items())): + executable = variant.find_executable() + if executable: + output = variant + break + return output + + def create_launch_context(self, app_name, **data): + """Prepare launch context for application. + + Args: + app_name (str): Name of application that should be launched. + **data (Any): Any additional data. Data may be used during + + Returns: + ApplicationLaunchContext: Launch context for application. + + Raises: + ApplicationNotFound: Application was not found by entered name. + """ + + app = self.applications.get(app_name) + if not app: + raise ApplicationNotFound(app_name) + + executable = app.find_executable() + + return ApplicationLaunchContext( + app, executable, **data + ) + + def launch_with_context(self, launch_context): + """Launch application using existing launch context. + + Args: + launch_context (ApplicationLaunchContext): Prepared launch + context. + """ + + if not launch_context.executable: + raise ApplicationExecutableNotFound(launch_context.application) + return launch_context.launch() + + def launch(self, app_name, **data): + """Launch procedure. + + For host application it's expected to contain "project_name", + "folder_path" and "task_name". + + Args: + app_name (str): Name of application that should be launched. + **data (dict): Any additional data. Data may be used during + preparation to store objects usable in multiple places. + + Raises: + ApplicationNotFound: Application was not found by entered + argument `app_name`. 
+ ApplicationExecutableNotFound: Executables in application definition + were not found on this machine. + ApplicationLaunchFailed: Something important for application launch + failed. Exception should contain explanation message, + traceback should not be needed. + """ + + context = self.create_launch_context(app_name, **data) + return self.launch_with_context(context) + + +class ApplicationLaunchContext: + """Context of launching application. + + Main purpose of context is to prepare launch arguments and keyword + arguments for new process. Most important part of keyword arguments + preparations are environment variables. + + During the whole process is possible to use `data` attribute to store + object usable in multiple places. + + Launch arguments are strings in list. It is possible to "chain" argument + when order of them matters. That is possible to do with adding list where + order is right and should not change. + NOTE: This is recommendation, not requirement. + e.g.: `["nuke.exe", "--NukeX"]` -> In this case any part of process may + insert argument between `nuke.exe` and `--NukeX`. To keep them together + it is better to wrap them in another list: `[["nuke.exe", "--NukeX"]]`. + + Notes: + It is possible to use launch context only to prepare environment + variables. In that case `executable` may be None and can be used + 'run_prelaunch_hooks' method to run prelaunch hooks which prepare + them. + + Args: + application (Application): Application definition. + executable (ApplicationExecutable): Object with path to executable. + env_group (Optional[str]): Environment variable group. If not set + 'DEFAULT_ENV_SUBGROUP' is used. + launch_type (Optional[str]): Launch type. If not set 'local' is used. + **data (dict): Any additional data. Data may be used during + preparation to store objects usable in multiple places. + """ + + def __init__( + self, + application, + executable, + env_group=None, + launch_type=None, + **data + ): + # Application object + self.application = application + + self.addons_manager = AddonsManager() + + # Logger + logger_name = "{}-{}".format(self.__class__.__name__, + self.application.full_name) + self.log = Logger.get_logger(logger_name) + + self.executable = executable + + if launch_type is None: + launch_type = LaunchTypes.local + self.launch_type = launch_type + + if env_group is None: + env_group = DEFAULT_ENV_SUBGROUP + + self.env_group = env_group + + self.data = dict(data) + + launch_args = [] + if executable is not None: + launch_args = executable.as_args() + # subprocess.Popen launch arguments (first argument in constructor) + self.launch_args = launch_args + self.launch_args.extend(application.arguments) + if self.data.get("app_args"): + self.launch_args.extend(self.data.pop("app_args")) + + # Handle launch environemtns + src_env = self.data.pop("env", None) + if src_env is not None and not isinstance(src_env, dict): + self.log.warning(( + "Passed `env` kwarg has invalid type: {}. Expected: `dict`." + " Using `os.environ` instead." 
+ ).format(str(type(src_env)))) + src_env = None + + if src_env is None: + src_env = os.environ + + ignored_env = {"QT_API", } + env = { + key: str(value) + for key, value in src_env.items() + if key not in ignored_env + } + # subprocess.Popen keyword arguments + self.kwargs = {"env": env} + + if platform.system().lower() == "windows": + # Detach new process from currently running process on Windows + flags = ( + subprocess.CREATE_NEW_PROCESS_GROUP + | subprocess.DETACHED_PROCESS + ) + self.kwargs["creationflags"] = flags + + if not sys.stdout: + self.kwargs["stdout"] = subprocess.DEVNULL + self.kwargs["stderr"] = subprocess.DEVNULL + + self.prelaunch_hooks = None + self.postlaunch_hooks = None + + self.process = None + self._prelaunch_hooks_executed = False + + @property + def env(self): + if ( + "env" not in self.kwargs + or self.kwargs["env"] is None + ): + self.kwargs["env"] = {} + return self.kwargs["env"] + + @env.setter + def env(self, value): + if not isinstance(value, dict): + raise ValueError( + "'env' attribute expect 'dict' object. Got: {}".format( + str(type(value)) + ) + ) + self.kwargs["env"] = value + + @property + def modules_manager(self): + """ + Deprecated: + Use 'addons_manager' instead. + + """ + return self.addons_manager + + def _collect_addons_launch_hook_paths(self): + """Helper to collect application launch hooks from addons. + + Module have to have implemented 'get_launch_hook_paths' method which + can expect application as argument or nothing. + + Returns: + List[str]: Paths to launch hook directories. + """ + + expected_types = (list, tuple, set) + + output = [] + for module in self.addons_manager.get_enabled_addons(): + # Skip module if does not have implemented 'get_launch_hook_paths' + func = getattr(module, "get_launch_hook_paths", None) + if func is None: + continue + + func = module.get_launch_hook_paths + if hasattr(inspect, "signature"): + sig = inspect.signature(func) + expect_args = len(sig.parameters) > 0 + else: + expect_args = len(inspect.getargspec(func)[0]) > 0 + + # Pass application argument if method expect it. + try: + if expect_args: + hook_paths = func(self.application) + else: + hook_paths = func() + except Exception: + self.log.warning( + "Failed to call 'get_launch_hook_paths'", + exc_info=True + ) + continue + + if not hook_paths: + continue + + # Convert string to list + if isinstance(hook_paths, six.string_types): + hook_paths = [hook_paths] + + # Skip invalid types + if not isinstance(hook_paths, expected_types): + self.log.warning(( + "Result of `get_launch_hook_paths`" + " has invalid type {}. Expected {}" + ).format(type(hook_paths), expected_types)) + continue + + output.extend(hook_paths) + return output + + def paths_to_launch_hooks(self): + """Directory paths where to look for launch hooks.""" + # This method has potential to be part of application manager (maybe). 
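# ----------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of this patch.
# The launch context above collects extra hook directories from enabled
# addons by calling their optional 'get_launch_hook_paths' method, which
# may take the application as an argument or no argument at all (the
# context inspects the signature before calling it). A minimal addon
# exposing such paths might look like this; 'MyAddon', the "maya" filter
# and the 'launch_hooks' folder name are assumptions for illustration.
import os

from ayon_core.addon import AYONAddon

MY_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))


class MyAddon(AYONAddon):
    name = "my_addon"

    def initialize(self, settings):
        self.enabled = True

    def get_launch_hook_paths(self, application):
        # Only contribute hooks when a specific host is launched.
        if application.host_name != "maya":
            return []
        # Directory containing PreLaunchHook/PostLaunchHook subclasses.
        return [os.path.join(MY_ADDON_ROOT, "launch_hooks")]
# ----------------------------------------------------------------------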
+ paths = [] + + # TODO load additional studio paths from settings + global_hooks_dir = os.path.join(AYON_CORE_ROOT, "hooks") + + hooks_dirs = [ + global_hooks_dir + ] + if self.host_name: + # If host requires launch hooks and is module then launch hooks + # should be collected using 'collect_launch_hook_paths' + # - module have to implement 'get_launch_hook_paths' + host_module = self.addons_manager.get_host_addon(self.host_name) + if not host_module: + hooks_dirs.append(os.path.join( + AYON_CORE_ROOT, "hosts", self.host_name, "hooks" + )) + + for path in hooks_dirs: + if ( + os.path.exists(path) + and os.path.isdir(path) + and path not in paths + ): + paths.append(path) + + # Load modules paths + paths.extend(self._collect_addons_launch_hook_paths()) + + return paths + + def discover_launch_hooks(self, force=False): + """Load and prepare launch hooks.""" + if ( + self.prelaunch_hooks is not None + or self.postlaunch_hooks is not None + ): + if not force: + self.log.info("Launch hooks were already discovered.") + return + + self.prelaunch_hooks.clear() + self.postlaunch_hooks.clear() + + self.log.debug("Discovery of launch hooks started.") + + paths = self.paths_to_launch_hooks() + self.log.debug("Paths searched for launch hooks:\n{}".format( + "\n".join("- {}".format(path) for path in paths) + )) + + all_classes = { + "pre": [], + "post": [] + } + for path in paths: + if not os.path.exists(path): + self.log.info( + "Path to launch hooks does not exist: \"{}\"".format(path) + ) + continue + + modules, _crashed = modules_from_path(path) + for _filepath, module in modules: + all_classes["pre"].extend( + classes_from_module(PreLaunchHook, module) + ) + all_classes["post"].extend( + classes_from_module(PostLaunchHook, module) + ) + + for launch_type, classes in all_classes.items(): + hooks_with_order = [] + hooks_without_order = [] + for klass in classes: + try: + hook = klass(self) + if not hook.is_valid: + self.log.debug( + "Skipped hook invalid for current launch context: " + "{}".format(klass.__name__) + ) + continue + + if inspect.isabstract(hook): + self.log.debug("Skipped abstract hook: {}".format( + klass.__name__ + )) + continue + + # Separate hooks by pre/post class + if hook.order is None: + hooks_without_order.append(hook) + else: + hooks_with_order.append(hook) + + except Exception: + self.log.warning( + "Initialization of hook failed: " + "{}".format(klass.__name__), + exc_info=True + ) + + # Sort hooks with order by order + ordered_hooks = list(sorted( + hooks_with_order, key=lambda obj: obj.order + )) + # Extend ordered hooks with hooks without defined order + ordered_hooks.extend(hooks_without_order) + + if launch_type == "pre": + self.prelaunch_hooks = ordered_hooks + else: + self.postlaunch_hooks = ordered_hooks + + self.log.debug("Found {} prelaunch and {} postlaunch hooks.".format( + len(self.prelaunch_hooks), len(self.postlaunch_hooks) + )) + + @property + def app_name(self): + return self.application.name + + @property + def host_name(self): + return self.application.host_name + + @property + def app_group(self): + return self.application.group + + @property + def manager(self): + return self.application.manager + + def _run_process(self): + # Windows and MacOS have easier process start + low_platform = platform.system().lower() + if low_platform in ("windows", "darwin"): + return subprocess.Popen(self.launch_args, **self.kwargs) + + # Linux uses mid process + # - it is possible that the mid process executable is not + # available for this version of AYON in that case 
use standard + # launch + launch_args = get_linux_launcher_args() + if launch_args is None: + return subprocess.Popen(self.launch_args, **self.kwargs) + + # Prepare data that will be passed to midprocess + # - store arguments to a json and pass path to json as last argument + # - pass environments to set + app_env = self.kwargs.pop("env", {}) + json_data = { + "args": self.launch_args, + "env": app_env + } + if app_env: + # Filter environments of subprocess + self.kwargs["env"] = { + key: value + for key, value in os.environ.items() + if key in app_env + } + + # Create temp file + json_temp = tempfile.NamedTemporaryFile( + mode="w", prefix="op_app_args", suffix=".json", delete=False + ) + json_temp.close() + json_temp_filpath = json_temp.name + with open(json_temp_filpath, "w") as stream: + json.dump(json_data, stream) + + launch_args.append(json_temp_filpath) + + # Create mid-process which will launch application + process = subprocess.Popen(launch_args, **self.kwargs) + # Wait until the process finishes + # - This is important! The process would stay in "open" state. + process.wait() + # Remove the temp file + os.remove(json_temp_filpath) + # Return process which is already terminated + return process + + def run_prelaunch_hooks(self): + """Run prelaunch hooks. + + This method will be executed only once, any future calls will skip + the processing. + """ + + if self._prelaunch_hooks_executed: + self.log.warning("Prelaunch hooks were already executed.") + return + # Discover launch hooks + self.discover_launch_hooks() + + # Execute prelaunch hooks + for prelaunch_hook in self.prelaunch_hooks: + self.log.debug("Executing prelaunch hook: {}".format( + str(prelaunch_hook.__class__.__name__) + )) + prelaunch_hook.execute() + self._prelaunch_hooks_executed = True + + def launch(self): + """Collect data for new process and then create it. + + This method must not be executed more than once. + + Returns: + subprocess.Popen: Created process as Popen object. + """ + if self.process is not None: + self.log.warning("Application was already launched.") + return + + if not self._prelaunch_hooks_executed: + self.run_prelaunch_hooks() + + self.log.debug("All prelaunch hook executed. Starting new process.") + + # Prepare subprocess args + args_len_str = "" + if isinstance(self.launch_args, str): + args = self.launch_args + else: + args = self.clear_launch_args(self.launch_args) + args_len_str = " ({})".format(len(args)) + self.log.info( + "Launching \"{}\" with args{}: {}".format( + self.application.full_name, args_len_str, args + ) + ) + self.launch_args = args + + # Run process + self.process = self._run_process() + + # Process post launch hooks + for postlaunch_hook in self.postlaunch_hooks: + self.log.debug("Executing postlaunch hook: {}".format( + str(postlaunch_hook.__class__.__name__) + )) + + # TODO how to handle errors? + # - store to variable to let them accessible? + try: + postlaunch_hook.execute() + + except Exception: + self.log.warning( + "After launch procedures were not successful.", + exc_info=True + ) + + self.log.debug("Launch of {} finished.".format( + self.application.full_name + )) + + return self.process + + @staticmethod + def clear_launch_args(args): + """Collect launch arguments to final order. + + Launch argument should be list that may contain another lists this + function will upack inner lists and keep ordering. 
+ + ``` + # source + [ [ arg1, [ arg2, arg3 ] ], arg4, [arg5, arg6]] + # result + [ arg1, arg2, arg3, arg4, arg5, arg6] + + Args: + args (list): Source arguments in list may contain inner lists. + + Return: + list: Unpacked arguments. + """ + if isinstance(args, str): + return args + all_cleared = False + while not all_cleared: + all_cleared = True + new_args = [] + for arg in args: + if isinstance(arg, (list, tuple, set)): + all_cleared = False + for _arg in arg: + new_args.append(_arg) + else: + new_args.append(arg) + args = new_args + + return args + diff --git a/client/ayon_core/addons/applications/ayon_applications/plugins/publish/collect_app_name.py b/client/ayon_core/addons/applications/ayon_applications/plugins/publish/collect_app_name.py new file mode 100644 index 0000000000..f54a551cda --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/plugins/publish/collect_app_name.py @@ -0,0 +1,48 @@ +""" +Run after global plugin 'CollectHostName' in ayon_core. + +Requires: + None + +Provides: + context -> hostName (str) + context -> appName (str) + context -> appLabel (str) +""" +import os +import pyblish.api + +from ayon_applications import ApplicationManager + + +class CollectAppName(pyblish.api.ContextPlugin): + """Collect avalon host name to context.""" + + label = "Collect App Name" + order = pyblish.api.CollectorOrder - 0.499999 + + def process(self, context): + host_name = context.data.get("hostName") + app_name = context.data.get("appName") + app_label = context.data.get("appLabel") + # Don't override value if is already set + if host_name and app_name and app_label: + return + + # Use AYON_APP_NAME to get full app name + if not app_name: + app_name = os.environ.get("AYON_APP_NAME") + + # Fill missing values based on app full name + if (not host_name or not app_label) and app_name: + app_manager = ApplicationManager() + app = app_manager.applications.get(app_name) + if app: + if not host_name: + host_name = app.host_name + if not app_label: + app_label = app.full_label + + context.data["hostName"] = host_name + context.data["appName"] = app_name + context.data["appLabel"] = app_label diff --git a/client/ayon_core/addons/applications/ayon_applications/utils.py b/client/ayon_core/addons/applications/ayon_applications/utils.py new file mode 100644 index 0000000000..234fa6c683 --- /dev/null +++ b/client/ayon_core/addons/applications/ayon_applications/utils.py @@ -0,0 +1,609 @@ +import os +import copy +import json +import platform +import collections + +import six +import acre + +from ayon_core import AYON_CORE_ROOT +from ayon_core.settings import get_project_settings +from ayon_core.lib import Logger, get_ayon_username +from ayon_core.addon import AddonsManager +from ayon_core.pipeline import HOST_WORKFILE_EXTENSIONS +from ayon_core.pipeline.template_data import get_template_data +from ayon_core.pipeline.workfile import ( + get_workfile_template_key, + get_workdir_with_workdir_data, + get_last_workfile, + should_use_last_workfile_on_launch, + should_open_workfiles_tool_on_launch, +) + +from .constants import PLATFORM_NAMES, DEFAULT_ENV_SUBGROUP +from .exceptions import MissingRequiredKey, ApplicationLaunchFailed +from .manager import ApplicationManager + + +def parse_environments(env_data, env_group=None, platform_name=None): + """Parse environment values from settings byt group and platform. + + Data may contain up to 2 hierarchical levels of dictionaries. At the end + of the last level must be string or list. 
List is joined using platform + specific joiner (';' for windows and ':' for linux and mac). + + Hierarchical levels can contain keys for subgroups and platform name. + Platform specific values must be always last level of dictionary. Platform + names are "windows" (MS Windows), "linux" (any linux distribution) and + "darwin" (any MacOS distribution). + + Subgroups are helpers added mainly for standard and on farm usage. Farm + may require different environments for e.g. licence related values or + plugins. Default subgroup is "standard". + + Examples: + ``` + { + # Unchanged value + "ENV_KEY1": "value", + # Empty values are kept (unset environment variable) + "ENV_KEY2": "", + + # Join list values with ':' or ';' + "ENV_KEY3": ["value1", "value2"], + + # Environment groups + "ENV_KEY4": { + "standard": "DEMO_SERVER_URL", + "farm": "LICENCE_SERVER_URL" + }, + + # Platform specific (and only for windows and mac) + "ENV_KEY5": { + "windows": "windows value", + "darwin": ["value 1", "value 2"] + }, + + # Environment groups and platform combination + "ENV_KEY6": { + "farm": "FARM_VALUE", + "standard": { + "windows": ["value1", "value2"], + "linux": "value1", + "darwin": "" + } + } + } + ``` + """ + output = {} + if not env_data: + return output + + if not env_group: + env_group = DEFAULT_ENV_SUBGROUP + + if not platform_name: + platform_name = platform.system().lower() + + for key, value in env_data.items(): + if isinstance(value, dict): + # Look if any key is platform key + # - expect that represents environment group if does not contain + # platform keys + if not PLATFORM_NAMES.intersection(set(value.keys())): + # Skip the key if group is not available + if env_group not in value: + continue + value = value[env_group] + + # Check again if value is dictionary + # - this time there should be only platform keys + if isinstance(value, dict): + value = value.get(platform_name) + + # Check if value is list and join it's values + # QUESTION Should empty values be skipped? + if isinstance(value, (list, tuple)): + value = os.pathsep.join(value) + + # Set key to output if value is string + if isinstance(value, six.string_types): + output[key] = value + return output + + +class EnvironmentPrepData(dict): + """Helper dictionary for storin temp data during environment prep. + + Args: + data (dict): Data must contain required keys. + """ + required_keys = ( + "project_entity", "folder_entity", "task_entity", "app", "anatomy" + ) + + def __init__(self, data): + for key in self.required_keys: + if key not in data: + raise MissingRequiredKey(key) + + if not data.get("log"): + data["log"] = Logger.get_logger("EnvironmentPrepData") + + if data.get("env") is None: + data["env"] = os.environ.copy() + + project_name = data["project_entity"]["name"] + if "project_settings" not in data: + data["project_settings"] = get_project_settings(project_name) + + super(EnvironmentPrepData, self).__init__(data) + + +def get_app_environments_for_context( + project_name, + folder_path, + task_name, + app_name, + env_group=None, + launch_type=None, + env=None, + addons_manager=None +): + """Prepare environment variables by context. + Args: + project_name (str): Name of project. + folder_path (str): Folder path. + task_name (str): Name of task. + app_name (str): Name of application that is launched and can be found + by ApplicationManager. + env_group (Optional[str]): Name of environment group. If not passed + default group is used. + launch_type (Optional[str]): Type for which prelaunch hooks are + executed. 
+ env (Optional[dict[str, str]]): Initial environment variables. + `os.environ` is used when not passed. + addons_manager (Optional[AddonsManager]): Initialized modules + manager. + + Returns: + dict: Environments for passed context and application. + """ + + # Prepare app object which can be obtained only from ApplicationManager + app_manager = ApplicationManager() + context = app_manager.create_launch_context( + app_name, + project_name=project_name, + folder_path=folder_path, + task_name=task_name, + env_group=env_group, + launch_type=launch_type, + env=env, + addons_manager=addons_manager, + modules_manager=addons_manager, + ) + context.run_prelaunch_hooks() + return context.env + + +def _merge_env(env, current_env): + """Modified function(merge) from acre module.""" + result = current_env.copy() + for key, value in env.items(): + # Keep missing keys by not filling `missing` kwarg + value = acre.lib.partial_format(value, data=current_env) + result[key] = value + return result + + +def _add_python_version_paths(app, env, logger, addons_manager): + """Add vendor packages specific for a Python version.""" + + for addon in addons_manager.get_enabled_addons(): + addon.modify_application_launch_arguments(app, env) + + # Skip adding if host name is not set + if not app.host_name: + return + + # Add Python 2/3 modules + python_vendor_dir = os.path.join( + AYON_CORE_ROOT, + "vendor", + "python" + ) + if app.use_python_2: + pythonpath = os.path.join(python_vendor_dir, "python_2") + else: + pythonpath = os.path.join(python_vendor_dir, "python_3") + + if not os.path.exists(pythonpath): + return + + logger.debug("Adding Python version specific paths to PYTHONPATH") + python_paths = [pythonpath] + + # Load PYTHONPATH from current launch context + python_path = env.get("PYTHONPATH") + if python_path: + python_paths.append(python_path) + + # Set new PYTHONPATH to launch context environments + env["PYTHONPATH"] = os.pathsep.join(python_paths) + + +def prepare_app_environments( + data, env_group=None, implementation_envs=True, addons_manager=None +): + """Modify launch environments based on launched app and context. + + Args: + data (EnvironmentPrepData): Dictionary where result and intermediate + result will be stored. + + """ + app = data["app"] + log = data["log"] + source_env = data["env"].copy() + + if addons_manager is None: + addons_manager = AddonsManager() + + _add_python_version_paths(app, source_env, log, addons_manager) + + # Use environments from local settings + filtered_local_envs = {} + # NOTE Overrides for environment variables are not implemented in AYON. 
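# ----------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of this patch.
# The app and tool environment blocks gathered below are plain dicts from
# settings and are resolved with 'parse_environments' before merging:
# values may be flat strings, lists (joined with os.pathsep) or nested
# dicts keyed by env group ("standard"/"farm") and/or platform name.
# The keys and values here are made up for illustration only.
from ayon_applications.utils import parse_environments

example_env = {
    "MY_LICENSE_SERVER": {
        "standard": "5053@lic-local",
        "farm": "5053@lic-farm",
    },
    "MY_PLUGIN_PATH": {
        "windows": ["C:/plugins", "C:/extra_plugins"],
        "linux": "/opt/plugins",
        "darwin": "",
    },
}
resolved = parse_environments(
    example_env, env_group="standard", platform_name="linux"
)
# resolved == {
#     "MY_LICENSE_SERVER": "5053@lic-local",
#     "MY_PLUGIN_PATH": "/opt/plugins",
# }
# ----------------------------------------------------------------------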
+ # project_settings = data["project_settings"] + # whitelist_envs = project_settings["general"].get("local_env_white_list") + # if whitelist_envs: + # local_settings = get_local_settings() + # local_envs = local_settings.get("environments") or {} + # filtered_local_envs = { + # key: value + # for key, value in local_envs.items() + # if key in whitelist_envs + # } + + # Apply local environment variables for already existing values + for key, value in filtered_local_envs.items(): + if key in source_env: + source_env[key] = value + + # `app_and_tool_labels` has debug purpose + app_and_tool_labels = [app.full_name] + # Environments for application + environments = [ + app.group.environment, + app.environment + ] + + folder_entity = data.get("folder_entity") + # Add tools environments + groups_by_name = {} + tool_by_group_name = collections.defaultdict(dict) + if folder_entity: + # Make sure each tool group can be added only once + for key in folder_entity["attrib"].get("tools") or []: + tool = app.manager.tools.get(key) + if not tool or not tool.is_valid_for_app(app): + continue + groups_by_name[tool.group.name] = tool.group + tool_by_group_name[tool.group.name][tool.name] = tool + + for group_name in sorted(groups_by_name.keys()): + group = groups_by_name[group_name] + environments.append(group.environment) + for tool_name in sorted(tool_by_group_name[group_name].keys()): + tool = tool_by_group_name[group_name][tool_name] + environments.append(tool.environment) + app_and_tool_labels.append(tool.full_name) + + log.debug( + "Will add environments for apps and tools: {}".format( + ", ".join(app_and_tool_labels) + ) + ) + + env_values = {} + for _env_values in environments: + if not _env_values: + continue + + # Choose right platform + tool_env = parse_environments(_env_values, env_group) + + # Apply local environment variables + # - must happen between all values because they may be used during + # merge + for key, value in filtered_local_envs.items(): + if key in tool_env: + tool_env[key] = value + + # Merge dictionaries + env_values = _merge_env(tool_env, env_values) + + merged_env = _merge_env(env_values, source_env) + + loaded_env = acre.compute(merged_env, cleanup=False) + + final_env = None + # Add host specific environments + if app.host_name and implementation_envs: + host_addon = addons_manager.get_host_addon(app.host_name) + add_implementation_envs = None + if host_addon: + add_implementation_envs = getattr( + host_addon, "add_implementation_envs", None + ) + if add_implementation_envs: + # Function may only modify passed dict without returning value + final_env = add_implementation_envs(loaded_env, app) + + if final_env is None: + final_env = loaded_env + + keys_to_remove = set(source_env.keys()) - set(final_env.keys()) + + # Update env + data["env"].update(final_env) + for key in keys_to_remove: + data["env"].pop(key, None) + + +def apply_project_environments_value( + project_name, env, project_settings=None, env_group=None +): + """Apply project specific environments on passed environments. + + The environments are applied on passed `env` argument value so it is not + required to apply changes back. + + Args: + project_name (str): Name of project for which environments should be + received. + env (dict): Environment values on which project specific environments + will be applied. + project_settings (dict): Project settings for passed project name. + Optional if project settings are already prepared. + + Returns: + dict: Passed env values with applied project environments. 
+ + Raises: + KeyError: If project settings do not contain keys for project specific + environments. + + """ + if project_settings is None: + project_settings = get_project_settings(project_name) + + env_value = project_settings["core"]["project_environments"] + if env_value: + env_value = json.loads(env_value) + parsed_value = parse_environments(env_value, env_group) + env.update(acre.compute( + _merge_env(parsed_value, env), + cleanup=False + )) + return env + + +def prepare_context_environments(data, env_group=None, addons_manager=None): + """Modify launch environments with context data for launched host. + + Args: + data (EnvironmentPrepData): Dictionary where result and intermediate + result will be stored. + + """ + # Context environments + log = data["log"] + + project_entity = data["project_entity"] + folder_entity = data["folder_entity"] + task_entity = data["task_entity"] + if not project_entity: + log.info( + "Skipping context environments preparation." + " Launch context does not contain required data." + ) + return + + # Load project specific environments + project_name = project_entity["name"] + project_settings = get_project_settings(project_name) + data["project_settings"] = project_settings + + app = data["app"] + context_env = { + "AYON_PROJECT_NAME": project_entity["name"], + "AYON_APP_NAME": app.full_name + } + if folder_entity: + folder_path = folder_entity["path"] + context_env["AYON_FOLDER_PATH"] = folder_path + + if task_entity: + context_env["AYON_TASK_NAME"] = task_entity["name"] + + log.debug( + "Context environments set:\n{}".format( + json.dumps(context_env, indent=4) + ) + ) + data["env"].update(context_env) + + # Apply project specific environments on current env value + # - apply them once the context environments are set + apply_project_environments_value( + project_name, data["env"], project_settings, env_group + ) + + if not app.is_host: + return + + data["env"]["AYON_HOST_NAME"] = app.host_name + + if not folder_entity or not task_entity: + # QUESTION replace with log.info and skip workfile discovery? + # - technically it should be possible to launch host without context + raise ApplicationLaunchFailed( + "Host launch require folder and task context." + ) + + workdir_data = get_template_data( + project_entity, + folder_entity, + task_entity, + app.host_name, + project_settings + ) + data["workdir_data"] = workdir_data + + anatomy = data["anatomy"] + + task_type = workdir_data["task"]["type"] + # Temp solution how to pass task type to `_prepare_last_workfile` + data["task_type"] = task_type + + try: + workdir = get_workdir_with_workdir_data( + workdir_data, + anatomy.project_name, + anatomy, + project_settings=project_settings + ) + + except Exception as exc: + raise ApplicationLaunchFailed( + "Error in anatomy.format: {}".format(str(exc)) + ) + + if not os.path.exists(workdir): + log.debug( + "Creating workdir folder: \"{}\"".format(workdir) + ) + try: + os.makedirs(workdir) + except Exception as exc: + raise ApplicationLaunchFailed( + "Couldn't create workdir because: {}".format(str(exc)) + ) + + data["env"]["AYON_WORKDIR"] = workdir + + _prepare_last_workfile(data, workdir, addons_manager) + + +def _prepare_last_workfile(data, workdir, addons_manager): + """last workfile workflow preparation. + + Function check if should care about last workfile workflow and tries + to find the last workfile. Both information are stored to `data` and + environments. + + Last workfile is filled always (with version 1) even if any workfile + exists yet. 
+ + Args: + data (EnvironmentPrepData): Dictionary where result and intermediate + result will be stored. + workdir (str): Path to folder where workfiles should be stored. + + """ + if not addons_manager: + addons_manager = AddonsManager() + + log = data["log"] + + _workdir_data = data.get("workdir_data") + if not _workdir_data: + log.info( + "Skipping last workfile preparation." + " Key `workdir_data` not filled." + ) + return + + app = data["app"] + workdir_data = copy.deepcopy(_workdir_data) + project_name = data["project_name"] + task_name = data["task_name"] + task_type = data["task_type"] + + start_last_workfile = data.get("start_last_workfile") + if start_last_workfile is None: + start_last_workfile = should_use_last_workfile_on_launch( + project_name, app.host_name, task_name, task_type + ) + else: + log.info("Opening of last workfile was disabled by user") + + data["start_last_workfile"] = start_last_workfile + + workfile_startup = should_open_workfiles_tool_on_launch( + project_name, app.host_name, task_name, task_type + ) + data["workfile_startup"] = workfile_startup + + # Store boolean as "0"(False) or "1"(True) + data["env"]["AVALON_OPEN_LAST_WORKFILE"] = ( + str(int(bool(start_last_workfile))) + ) + data["env"]["AYON_WORKFILE_TOOL_ON_START"] = ( + str(int(bool(workfile_startup))) + ) + + _sub_msg = "" if start_last_workfile else " not" + log.debug( + "Last workfile should{} be opened on start.".format(_sub_msg) + ) + + # Last workfile path + last_workfile_path = data.get("last_workfile_path") or "" + if not last_workfile_path: + host_addon = addons_manager.get_host_addon(app.host_name) + if host_addon: + extensions = host_addon.get_workfile_extensions() + else: + extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) + + if extensions: + anatomy = data["anatomy"] + project_settings = data["project_settings"] + task_type = workdir_data["task"]["type"] + template_key = get_workfile_template_key( + project_name, + task_type, + app.host_name, + project_settings=project_settings + ) + # Find last workfile + file_template = anatomy.get_template_item( + "work", template_key, "file" + ).template + + workdir_data.update({ + "version": 1, + "user": get_ayon_username(), + "ext": extensions[0] + }) + + last_workfile_path = get_last_workfile( + workdir, file_template, workdir_data, extensions, True + ) + + if os.path.exists(last_workfile_path): + log.debug(( + "Workfiles for launch context does not exists" + " yet but path will be set." 
+ )) + log.debug( + "Setting last workfile path: {}".format(last_workfile_path) + ) + + data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path + data["last_workfile_path"] = last_workfile_path diff --git a/client/ayon_core/cli.py b/client/ayon_core/cli.py index 2759b4fccf..80fbb57340 100644 --- a/client/ayon_core/cli.py +++ b/client/ayon_core/cli.py @@ -4,6 +4,7 @@ import os import sys import code import traceback +from pathlib import Path import click import acre @@ -11,7 +12,7 @@ import acre from ayon_core import AYON_CORE_ROOT from ayon_core.addon import AddonsManager from ayon_core.settings import get_general_environments -from ayon_core.lib import initialize_ayon_connection +from ayon_core.lib import initialize_ayon_connection, is_running_from_build from .cli_commands import Commands @@ -81,7 +82,7 @@ main_cli.set_alias("addon", "module") @main_cli.command() @click.argument("output_json_path") @click.option("--project", help="Project name", default=None) -@click.option("--asset", help="Asset name", default=None) +@click.option("--asset", help="Folder path", default=None) @click.option("--task", help="Task name", default=None) @click.option("--app", help="Application name", default=None) @click.option( @@ -96,6 +97,10 @@ def extractenvironments(output_json_path, project, asset, task, app, envgroup): environments will be extracted. Context options are "project", "asset", "task", "app" + + Deprecated: + This function is deprecated and will be removed in future. Please use + 'addon applications extractenvironments ...' instead. """ Commands.extractenvironments( output_json_path, project, asset, task, app, envgroup @@ -127,7 +132,7 @@ def publish_report_viewer(): @main_cli.command() @click.argument("output_path") @click.option("--project", help="Define project context") -@click.option("--asset", help="Define asset in project (project must be set)") +@click.option("--folder", help="Define folder in project (project must be set)") @click.option( "--strict", is_flag=True, @@ -136,18 +141,18 @@ def publish_report_viewer(): def contextselection( output_path, project, - asset, + folder, strict ): """Show Qt dialog to select context. - Context is project name, asset name and task name. The result is stored + Context is project name, folder path and task name. The result is stored into json file which path is passed in first argument. """ Commands.contextselection( output_path, project, - asset, + folder, strict ) @@ -163,16 +168,27 @@ def run(script): if not script: print("Error: missing path to script file.") + return + + # Remove first argument if it is the same as AYON executable + # - Forward compatibility with future AYON versions. + # - Current AYON launcher keeps the arguments with first argument but + # future versions might remove it. + first_arg = sys.argv[0] + if is_running_from_build(): + comp_path = os.path.join(os.environ["AYON_ROOT"], "start.py") else: + comp_path = os.getenv("AYON_EXECUTABLE") + # Compare paths and remove first argument if it is the same as AYON + if Path(first_arg).resolve() == Path(comp_path).resolve(): + sys.argv.pop(0) - args = sys.argv - args.remove("run") - args.remove(script) - sys.argv = args + # Remove 'run' command from sys.argv + sys.argv.remove("run") - args_string = " ".join(args[1:]) - print(f"... running: {script} {args_string}") - runpy.run_path(script, run_name="__main__", ) + args_string = " ".join(sys.argv[1:]) + print(f"... 
running: {script} {args_string}") + runpy.run_path(script, run_name="__main__") @main_cli.command() diff --git a/client/ayon_core/cli_commands.py b/client/ayon_core/cli_commands.py index bc0a22382c..0fb18be687 100644 --- a/client/ayon_core/cli_commands.py +++ b/client/ayon_core/cli_commands.py @@ -2,7 +2,6 @@ """Implementation of AYON commands.""" import os import sys -import json import warnings @@ -58,10 +57,7 @@ class Commands: """ from ayon_core.lib import Logger - from ayon_core.lib.applications import ( - get_app_environments_for_context, - LaunchTypes, - ) + from ayon_core.addon import AddonsManager from ayon_core.pipeline import ( install_ayon_plugins, @@ -69,7 +65,6 @@ class Commands: ) # Register target and host - import pyblish.api import pyblish.util if not isinstance(path, str): @@ -100,15 +95,13 @@ class Commands: for plugin_path in publish_paths: pyblish.api.register_plugin_path(plugin_path) - app_full_name = os.getenv("AYON_APP_NAME") - if app_full_name: + applications_addon = manager.get_enabled_addon("applications") + if applications_addon is not None: context = get_global_context() - env = get_app_environments_for_context( + env = applications_addon.get_farm_publish_environment_variables( context["project_name"], context["folder_path"], context["task_name"], - app_full_name, - launch_type=LaunchTypes.farm_publish, ) os.environ.update(env) @@ -150,36 +143,36 @@ class Commands: log.info("Publish finished.") @staticmethod - def extractenvironments(output_json_path, project, asset, task, app, - env_group): + def extractenvironments( + output_json_path, project, asset, task, app, env_group + ): """Produces json file with environment based on project and app. Called by Deadline plugin to propagate environment into render jobs. """ - from ayon_core.lib.applications import ( - get_app_environments_for_context, - LaunchTypes, + from ayon_core.addon import AddonsManager + + warnings.warn( + ( + "Command 'extractenvironments' is deprecated and will be" + " removed in future. Please use " + "'addon applications extractenvironments ...' instead." + ), + DeprecationWarning ) - if all((project, asset, task, app)): - env = get_app_environments_for_context( - project, - asset, - task, - app, - env_group=env_group, - launch_type=LaunchTypes.farm_render + addons_manager = AddonsManager() + applications_addon = addons_manager.get_enabled_addon("applications") + if applications_addon is None: + raise RuntimeError( + "Applications addon is not available or enabled." 
) - else: - env = os.environ.copy() - output_dir = os.path.dirname(output_json_path) - if not os.path.exists(output_dir): - os.makedirs(output_dir) - - with open(output_json_path, "w") as file_stream: - json.dump(env, file_stream, indent=4) + # Please ignore the fact this is using private method + applications_addon._cli_extract_environments( + output_json_path, project, asset, task, app, env_group + ) @staticmethod def contextselection(output_path, project_name, folder_path, strict): diff --git a/client/ayon_core/hooks/pre_add_last_workfile_arg.py b/client/ayon_core/hooks/pre_add_last_workfile_arg.py index d11bb106d6..74964e0df9 100644 --- a/client/ayon_core/hooks/pre_add_last_workfile_arg.py +++ b/client/ayon_core/hooks/pre_add_last_workfile_arg.py @@ -1,6 +1,6 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class AddLastWorkfileToLaunchArgs(PreLaunchHook): diff --git a/client/ayon_core/hooks/pre_copy_template_workfile.py b/client/ayon_core/hooks/pre_copy_template_workfile.py index 096ad7dd7e..c884116578 100644 --- a/client/ayon_core/hooks/pre_copy_template_workfile.py +++ b/client/ayon_core/hooks/pre_copy_template_workfile.py @@ -1,7 +1,7 @@ import os import shutil from ayon_core.settings import get_project_settings -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.pipeline.workfile import ( get_custom_workfile_template, get_custom_workfile_template_by_string_context diff --git a/client/ayon_core/hooks/pre_create_extra_workdir_folders.py b/client/ayon_core/hooks/pre_create_extra_workdir_folders.py index 72c6bf2f68..8cbdaa338e 100644 --- a/client/ayon_core/hooks/pre_create_extra_workdir_folders.py +++ b/client/ayon_core/hooks/pre_create_extra_workdir_folders.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.pipeline.workfile import create_workdir_extra_folders diff --git a/client/ayon_core/hooks/pre_global_host_data.py b/client/ayon_core/hooks/pre_global_host_data.py index 27e66450ab..e93b512742 100644 --- a/client/ayon_core/hooks/pre_global_host_data.py +++ b/client/ayon_core/hooks/pre_global_host_data.py @@ -1,7 +1,7 @@ from ayon_api import get_project, get_folder_by_path, get_task_by_name -from ayon_core.lib.applications import ( - PreLaunchHook, +from ayon_applications import PreLaunchHook +from ayon_applications.utils import ( EnvironmentPrepData, prepare_app_environments, prepare_context_environments diff --git a/client/ayon_core/hooks/pre_mac_launch.py b/client/ayon_core/hooks/pre_mac_launch.py index 34680155f1..b234a20310 100644 --- a/client/ayon_core/hooks/pre_mac_launch.py +++ b/client/ayon_core/hooks/pre_mac_launch.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class LaunchWithTerminal(PreLaunchHook): diff --git a/client/ayon_core/hooks/pre_new_console_apps.py b/client/ayon_core/hooks/pre_new_console_apps.py index c81b924573..9777d37900 100644 --- a/client/ayon_core/hooks/pre_new_console_apps.py +++ b/client/ayon_core/hooks/pre_new_console_apps.py @@ -1,5 +1,5 @@ import subprocess -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class LaunchNewConsoleApps(PreLaunchHook): diff --git 
a/client/ayon_core/hooks/pre_ocio_hook.py b/client/ayon_core/hooks/pre_ocio_hook.py index e135a5bb12..0817afec71 100644 --- a/client/ayon_core/hooks/pre_ocio_hook.py +++ b/client/ayon_core/hooks/pre_ocio_hook.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook +from ayon_applications import PreLaunchHook from ayon_core.pipeline.colorspace import get_imageio_config from ayon_core.pipeline.template_data import get_template_data_with_names diff --git a/client/ayon_core/host/dirmap.py b/client/ayon_core/host/dirmap.py index effafb6261..2e24877d28 100644 --- a/client/ayon_core/host/dirmap.py +++ b/client/ayon_core/host/dirmap.py @@ -36,23 +36,23 @@ class HostDirmap(object): host_name, project_name, project_settings=None, - sync_module=None + sitesync_addon=None ): self.host_name = host_name self.project_name = project_name self._project_settings = project_settings - self._sync_module = sync_module + self._sitesync_addon = sitesync_addon # to limit reinit of Modules - self._sync_module_discovered = sync_module is not None + self._sitesync_addon_discovered = sitesync_addon is not None self._log = None @property - def sync_module(self): - if not self._sync_module_discovered: - self._sync_module_discovered = True + def sitesync_addon(self): + if not self._sitesync_addon_discovered: + self._sitesync_addon_discovered = True manager = AddonsManager() - self._sync_module = manager.get("sync_server") - return self._sync_module + self._sitesync_addon = manager.get("sitesync") + return self._sitesync_addon @property def project_settings(self): @@ -158,25 +158,25 @@ class HostDirmap(object): """ project_name = self.project_name - sync_module = self.sync_module + sitesync_addon = self.sitesync_addon mapping = {} if ( - sync_module is None - or not sync_module.enabled - or project_name not in sync_module.get_enabled_projects() + sitesync_addon is None + or not sitesync_addon.enabled + or project_name not in sitesync_addon.get_enabled_projects() ): return mapping - active_site = sync_module.get_local_normalized_site( - sync_module.get_active_site(project_name)) - remote_site = sync_module.get_local_normalized_site( - sync_module.get_remote_site(project_name)) + active_site = sitesync_addon.get_local_normalized_site( + sitesync_addon.get_active_site(project_name)) + remote_site = sitesync_addon.get_local_normalized_site( + sitesync_addon.get_remote_site(project_name)) self.log.debug( "active {} - remote {}".format(active_site, remote_site) ) if active_site == "local" and active_site != remote_site: - sync_settings = sync_module.get_sync_project_setting( + sync_settings = sitesync_addon.get_sync_project_setting( project_name, exclude_locals=False, cached=False) @@ -194,7 +194,7 @@ class HostDirmap(object): self.log.debug("remote overrides {}".format(remote_overrides)) current_platform = platform.system().lower() - remote_provider = sync_module.get_provider_for_site( + remote_provider = sitesync_addon.get_provider_for_site( project_name, remote_site ) # dirmap has sense only with regular disk provider, in the workfile diff --git a/client/ayon_core/hosts/aftereffects/api/__init__.py b/client/ayon_core/hosts/aftereffects/api/__init__.py index 4c4a8cce2f..b1d83c5ad9 100644 --- a/client/ayon_core/hosts/aftereffects/api/__init__.py +++ b/client/ayon_core/hosts/aftereffects/api/__init__.py @@ -31,6 +31,7 @@ __all__ = [ "get_stub", # pipeline + "AfterEffectsHost", "ls", "containerise", diff --git a/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py 
b/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py index 979d9ff3e5..a37481566e 100644 --- a/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py +++ b/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py @@ -6,10 +6,7 @@ from ayon_core.lib import ( get_ayon_launcher_args, is_using_ayon_console, ) -from ayon_core.lib.applications import ( - PreLaunchHook, - LaunchTypes, -) +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.aftereffects import get_launch_script_path diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py index afd58ca758..4134e9d593 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py @@ -1,14 +1,11 @@ import os -import re import tempfile -import attr +import attr import pyblish.api -from ayon_core.settings import get_project_settings from ayon_core.pipeline import publish from ayon_core.pipeline.publish import RenderInstance - from ayon_core.hosts.aftereffects.api import get_stub diff --git a/client/ayon_core/hosts/blender/api/ops.py b/client/ayon_core/hosts/blender/api/ops.py index d71ee6faf5..c03ec98d0c 100644 --- a/client/ayon_core/hosts/blender/api/ops.py +++ b/client/ayon_core/hosts/blender/api/ops.py @@ -191,7 +191,7 @@ def _process_app_events() -> Optional[float]: class LaunchQtApp(bpy.types.Operator): - """A Base class for opertors to launch a Qt app.""" + """A Base class for operators to launch a Qt app.""" _app: QtWidgets.QApplication _window = Union[QtWidgets.QDialog, ModuleType] diff --git a/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py b/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py index 00b297f998..9041ef7309 100644 --- a/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py +++ b/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py @@ -1,6 +1,6 @@ from pathlib import Path -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class AddPythonScriptToLaunchArgs(PreLaunchHook): diff --git a/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py b/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py index c80a1bd669..8f46eea0de 100644 --- a/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py +++ b/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py @@ -2,7 +2,7 @@ import os import re import subprocess from platform import system -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class InstallPySideToBlender(PreLaunchHook): diff --git a/client/ayon_core/hosts/blender/hooks/pre_windows_console.py b/client/ayon_core/hosts/blender/hooks/pre_windows_console.py index e3a8593cd9..47303a7af4 100644 --- a/client/ayon_core/hosts/blender/hooks/pre_windows_console.py +++ b/client/ayon_core/hosts/blender/hooks/pre_windows_console.py @@ -1,5 +1,5 @@ import subprocess -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class BlenderConsoleWindows(PreLaunchHook): diff --git a/client/ayon_core/hosts/blender/plugins/load/load_blend.py b/client/ayon_core/hosts/blender/plugins/load/load_blend.py index e84dddc88f..1984193a30 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_blend.py +++ 
b/client/ayon_core/hosts/blender/plugins/load/load_blend.py @@ -227,7 +227,7 @@ class BlendLoader(plugin.AssetLoader): obj.animation_data_create() obj.animation_data.action = actions[obj.name] - # Restore the old data, but reset memebers, as they don't exist anymore + # Restore the old data, but reset members, as they don't exist anymore # This avoids a crash, because the memory addresses of those members # are not valid anymore old_data["members"] = [] diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_camera_abc.py b/client/ayon_core/hosts/blender/plugins/publish/extract_camera_abc.py index cc783e552c..c60c92dee1 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_camera_abc.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_camera_abc.py @@ -4,7 +4,6 @@ import bpy from ayon_core.pipeline import publish from ayon_core.hosts.blender.api import plugin -from ayon_core.hosts.blender.api.pipeline import AVALON_PROPERTY class ExtractCameraABC(publish.Extractor, publish.OptionalPyblishPluginMixin): diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_fbx.py b/client/ayon_core/hosts/blender/plugins/publish/extract_fbx.py index 7ebda2c4cd..e6367dbc0d 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_fbx.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_fbx.py @@ -4,7 +4,6 @@ import bpy from ayon_core.pipeline import publish from ayon_core.hosts.blender.api import plugin -from ayon_core.hosts.blender.api.pipeline import AVALON_PROPERTY class ExtractFBX(publish.Extractor, publish.OptionalPyblishPluginMixin): diff --git a/client/ayon_core/hosts/blender/plugins/publish/validate_deadline_publish.py b/client/ayon_core/hosts/blender/plugins/publish/validate_deadline_publish.py index b37db44cd4..a86e73ba81 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/validate_deadline_publish.py +++ b/client/ayon_core/hosts/blender/plugins/publish/validate_deadline_publish.py @@ -32,7 +32,7 @@ class ValidateDeadlinePublish(pyblish.api.InstancePlugin, tree = bpy.context.scene.node_tree output_type = "CompositorNodeOutputFile" output_node = None - # Remove all output nodes that inlcude "AYON" in the name. + # Remove all output nodes that include "AYON" in the name. # There should be only one. 
for node in tree.nodes: if node.bl_idname == output_type and "AYON" in node.name: diff --git a/client/ayon_core/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py b/client/ayon_core/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py index 63b7dc7530..fb16bb7f8d 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py +++ b/client/ayon_core/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py @@ -12,7 +12,7 @@ from ayon_core.pipeline.publish import ( import ayon_core.hosts.blender.api.action -class ValidateMeshNoNegativeScale(pyblish.api.Validator, +class ValidateMeshNoNegativeScale(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure that meshes don't have a negative scale.""" diff --git a/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py b/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py index 73b368e4e3..8350c7b7c8 100644 --- a/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py +++ b/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py @@ -3,7 +3,7 @@ import shutil import winreg import subprocess from ayon_core.lib import get_ayon_launcher_args -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.celaction import CELACTION_ROOT_DIR @@ -118,7 +118,7 @@ class CelactionPrelaunchHook(PreLaunchHook): def workfile_path(self): workfile_path = self.data["last_workfile_path"] - # copy workfile from template if doesnt exist any on path + # copy workfile from template if doesn't exist any on path if not os.path.exists(workfile_path): # TODO add ability to set different template workfile path via # settings diff --git a/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py index 54dea15dff..1820569918 100644 --- a/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py +++ b/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py @@ -3,11 +3,11 @@ import sys from pprint import pformat -class CollectCelactionCliKwargs(pyblish.api.Collector): +class CollectCelactionCliKwargs(pyblish.api.ContextPlugin): """ Collects all keyword arguments passed from the terminal """ label = "Collect Celaction Cli Kwargs" - order = pyblish.api.Collector.order - 0.1 + order = pyblish.api.CollectorOrder - 0.1 def process(self, context): args = list(sys.argv[1:]) diff --git a/client/ayon_core/hosts/celaction/plugins/publish/collect_render_path.py b/client/ayon_core/hosts/celaction/plugins/publish/collect_render_path.py index 52bb183663..1bb4d54831 100644 --- a/client/ayon_core/hosts/celaction/plugins/publish/collect_render_path.py +++ b/client/ayon_core/hosts/celaction/plugins/publish/collect_render_path.py @@ -38,7 +38,7 @@ class CollectRenderPath(pyblish.api.InstancePlugin): render_path = r_template_item["path"].format_strict(anatomy_data) self.log.debug("__ render_path: `{}`".format(render_path)) - # create dir if it doesnt exists + # create dir if it doesn't exists try: if not os.path.isdir(render_dir): os.makedirs(render_dir, exist_ok=True) diff --git a/client/ayon_core/hosts/flame/api/__init__.py b/client/ayon_core/hosts/flame/api/__init__.py index e2c5ee154a..8fcf0c92b0 100644 --- a/client/ayon_core/hosts/flame/api/__init__.py +++ b/client/ayon_core/hosts/flame/api/__init__.py @@ -23,7 +23,7 @@ from .lib import ( 
reset_segment_selection, get_segment_attributes, get_clips_in_reels, - get_reformated_filename, + get_reformatted_filename, get_frame_from_filename, get_padding_from_filename, maintained_object_duplication, @@ -101,7 +101,7 @@ __all__ = [ "reset_segment_selection", "get_segment_attributes", "get_clips_in_reels", - "get_reformated_filename", + "get_reformatted_filename", "get_frame_from_filename", "get_padding_from_filename", "maintained_object_duplication", diff --git a/client/ayon_core/hosts/flame/api/lib.py b/client/ayon_core/hosts/flame/api/lib.py index efa23fe01e..8bfe6348ea 100644 --- a/client/ayon_core/hosts/flame/api/lib.py +++ b/client/ayon_core/hosts/flame/api/lib.py @@ -607,7 +607,7 @@ def get_clips_in_reels(project): return output_clips -def get_reformated_filename(filename, padded=True): +def get_reformatted_filename(filename, padded=True): """ Return fixed python expression path @@ -615,10 +615,10 @@ def get_reformated_filename(filename, padded=True): filename (str): file name Returns: - type: string with reformated path + type: string with reformatted path Example: - get_reformated_filename("plate.1001.exr") > plate.%04d.exr + get_reformatted_filename("plate.1001.exr") > plate.%04d.exr """ found = FRAME_PATTERN.search(filename) @@ -980,7 +980,7 @@ class MediaInfoFile(object): @property def file_pattern(self): - """Clips file patter + """Clips file pattern. Returns: str: file pattern. ex. file.[1-2].exr diff --git a/client/ayon_core/hosts/flame/api/plugin.py b/client/ayon_core/hosts/flame/api/plugin.py index c57d021c69..e656f33052 100644 --- a/client/ayon_core/hosts/flame/api/plugin.py +++ b/client/ayon_core/hosts/flame/api/plugin.py @@ -644,13 +644,13 @@ class PublishableClip: "families": [self.base_product_type, self.product_type] } - def _convert_to_entity(self, type, template): + def _convert_to_entity(self, src_type, template): """ Converting input key to key with type. 
""" # convert to entity type - entity_type = self.types.get(type, None) + folder_type = self.types.get(src_type, None) - assert entity_type, "Missing entity type for `{}`".format( - type + assert folder_type, "Missing folder type for `{}`".format( + src_type ) # first collect formatting data to use for formatting template @@ -661,7 +661,7 @@ class PublishableClip: formatting_data[_k] = value return { - "entity_type": entity_type, + "folder_type": folder_type, "entity_name": template.format( **formatting_data ) @@ -1018,7 +1018,7 @@ class OpenClipSolver(flib.MediaInfoFile): self.feed_version_name)) else: self.log.debug("adding new track element ..") - # create new track as it doesnt exists yet + # create new track as it doesn't exist yet # set current version to feeds on tmp tmp_xml_feeds = tmp_xml_track.find('feeds') tmp_xml_feeds.set('currentVersion', self.feed_version_name) diff --git a/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py b/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py index 1ff7ad7ccf..77a9435205 100644 --- a/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py +++ b/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py @@ -9,7 +9,7 @@ from ayon_core.lib import ( get_ayon_username, run_subprocess, ) -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts import flame as opflame diff --git a/client/ayon_core/hosts/flame/otio/flame_export.py b/client/ayon_core/hosts/flame/otio/flame_export.py index e5ea4dcf5e..cb038f9e9a 100644 --- a/client/ayon_core/hosts/flame/otio/flame_export.py +++ b/client/ayon_core/hosts/flame/otio/flame_export.py @@ -256,7 +256,7 @@ def create_otio_reference(clip_data, fps=None): if not otio_ex_ref_item: dirname, file_name = os.path.split(path) - file_name = utils.get_reformated_filename(file_name, padded=False) + file_name = utils.get_reformatted_filename(file_name, padded=False) reformated_path = os.path.join(dirname, file_name) # in case old OTIO or video file create `ExternalReference` otio_ex_ref_item = otio.schema.ExternalReference( diff --git a/client/ayon_core/hosts/flame/otio/utils.py b/client/ayon_core/hosts/flame/otio/utils.py index 7ded8e55d8..5a28263fc2 100644 --- a/client/ayon_core/hosts/flame/otio/utils.py +++ b/client/ayon_core/hosts/flame/otio/utils.py @@ -21,7 +21,7 @@ def frames_to_seconds(frames, framerate): return otio.opentime.to_seconds(rt) -def get_reformated_filename(filename, padded=True): +def get_reformatted_filename(filename, padded=True): """ Return fixed python expression path @@ -29,10 +29,10 @@ def get_reformated_filename(filename, padded=True): filename (str): file name Returns: - type: string with reformated path + type: string with reformatted path Example: - get_reformated_filename("plate.1001.exr") > plate.%04d.exr + get_reformatted_filename("plate.1001.exr") > plate.%04d.exr """ found = FRAME_PATTERN.search(filename) diff --git a/client/ayon_core/hosts/flame/plugins/create/create_shot_clip.py b/client/ayon_core/hosts/flame/plugins/create/create_shot_clip.py index e8eb2b9fab..56f5319f21 100644 --- a/client/ayon_core/hosts/flame/plugins/create/create_shot_clip.py +++ b/client/ayon_core/hosts/flame/plugins/create/create_shot_clip.py @@ -17,7 +17,7 @@ class CreateShotClip(opfapi.Creator): presets = deepcopy(self.presets) gui_inputs = self.get_gui_inputs() - # get key pares from presets and match it on ui inputs + # get key pairs from presets and match it on ui inputs for k, v in gui_inputs.items(): if v["type"] in 
("dict", "section"): # nested dictionary (only one level allowed @@ -236,7 +236,7 @@ class CreateShotClip(opfapi.Creator): "type": "QCheckBox", "label": "Source resolution", "target": "tag", - "toolTip": "Is resloution taken from timeline or source?", # noqa + "toolTip": "Is resolution taken from timeline or source?", # noqa "order": 4}, } }, diff --git a/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_instances.py b/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_instances.py index 9d6560023c..ca5475824d 100644 --- a/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -37,7 +37,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): self.otio_timeline = context.data["otioTimeline"] self.fps = context.data["fps"] - # process all sellected + # process all selected for segment in selected_segments: # get openpype tag data marker_data = opfapi.get_segment_data_marker(segment) @@ -100,6 +100,12 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): marker_data["handleEnd"] = min( marker_data["handleEnd"], tail) + # Backward compatibility fix of 'entity_type' > 'folder_type' + if "parents" in marker_data: + for parent in marker_data["parents"]: + if "entity_type" in parent: + parent["folder_type"] = parent.pop("entity_type") + workfile_start = self._set_workfile_start(marker_data) with_audio = bool(marker_data.pop("audio")) diff --git a/client/ayon_core/hosts/flame/startup/openpype_babypublisher/modules/ftrack_lib.py b/client/ayon_core/hosts/flame/startup/openpype_babypublisher/modules/ftrack_lib.py index 0e84a5ef52..a66980493e 100644 --- a/client/ayon_core/hosts/flame/startup/openpype_babypublisher/modules/ftrack_lib.py +++ b/client/ayon_core/hosts/flame/startup/openpype_babypublisher/modules/ftrack_lib.py @@ -396,7 +396,7 @@ class FtrackEntityOperator: entity = session.query(query).first() - # if entity doesnt exist then create one + # if entity doesn't exist then create one if not entity: entity = self.create_ftrack_entity( session, diff --git a/client/ayon_core/hosts/fusion/api/lib.py b/client/ayon_core/hosts/fusion/api/lib.py index e5bf4b5a44..03a1eeeb65 100644 --- a/client/ayon_core/hosts/fusion/api/lib.py +++ b/client/ayon_core/hosts/fusion/api/lib.py @@ -5,7 +5,9 @@ import contextlib from ayon_core.lib import Logger -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline import registered_host +from ayon_core.pipeline.create import CreateContext +from ayon_core.pipeline.context_tools import get_current_folder_entity self = sys.modules[__name__] self._project = None @@ -52,9 +54,15 @@ def update_frame_range(start, end, comp=None, set_render_range=True, comp.SetAttrs(attrs) -def set_current_context_framerange(): +def set_current_context_framerange(folder_entity=None): """Set Comp's frame range based on current folder.""" - folder_entity = get_current_project_folder() + if folder_entity is None: + folder_entity = get_current_folder_entity( + fields={"attrib.frameStart", + "attrib.frameEnd", + "attrib.handleStart", + "attrib.handleEnd"}) + folder_attributes = folder_entity["attrib"] start = folder_attributes["frameStart"] end = folder_attributes["frameEnd"] @@ -65,9 +73,24 @@ def set_current_context_framerange(): handle_end=handle_end) -def set_current_context_resolution(): +def set_current_context_fps(folder_entity=None): + """Set Comp's frame rate (FPS) to based on current asset""" + if folder_entity 
is None: + folder_entity = get_current_folder_entity(fields={"attrib.fps"}) + + fps = float(folder_entity["attrib"].get("fps", 24.0)) + comp = get_current_comp() + comp.SetPrefs({ + "Comp.FrameFormat.Rate": fps, + }) + + +def set_current_context_resolution(folder_entity=None): """Set Comp's resolution width x height default based on current folder""" - folder_entity = get_current_project_folder() + if folder_entity is None: + folder_entity = get_current_folder_entity( + fields={"attrib.resolutionWidth", "attrib.resolutionHeight"}) + folder_attributes = folder_entity["attrib"] width = folder_attributes["resolutionWidth"] height = folder_attributes["resolutionHeight"] @@ -101,7 +124,7 @@ def validate_comp_prefs(comp=None, force_repair=False): "attrib.resolutionHeight", "attrib.pixelAspect", } - folder_entity = get_current_project_folder(fields=fields) + folder_entity = get_current_folder_entity(fields=fields) folder_path = folder_entity["path"] folder_attributes = folder_entity["attrib"] @@ -285,3 +308,98 @@ def comp_lock_and_undo_chunk( finally: comp.Unlock() comp.EndUndo(keep_undo) + + +def update_content_on_context_change(): + """Update all Creator instances to current asset""" + host = registered_host() + context = host.get_current_context() + + folder_path = context["folder_path"] + task = context["task_name"] + + create_context = CreateContext(host, reset=True) + + for instance in create_context.instances: + instance_folder_path = instance.get("folderPath") + if instance_folder_path and instance_folder_path != folder_path: + instance["folderPath"] = folder_path + instance_task = instance.get("task") + if instance_task and instance_task != task: + instance["task"] = task + + create_context.save_changes() + + +def prompt_reset_context(): + """Prompt the user what context settings to reset. + This prompt is used on saving to a different task to allow the scene to + get matched to the new context. + """ + # TODO: Cleanup this prototyped mess of imports and odd dialog + from ayon_core.tools.attribute_defs.dialog import ( + AttributeDefinitionsDialog + ) + from ayon_core.style import load_stylesheet + from ayon_core.lib import BoolDef, UILabelDef + from qtpy import QtWidgets, QtCore + + definitions = [ + UILabelDef( + label=( + "You are saving your workfile into a different folder or task." 
+ "\n\n" + "Would you like to update some settings to the new context?\n" + ) + ), + BoolDef( + "fps", + label="FPS", + tooltip="Reset Comp FPS", + default=True + ), + BoolDef( + "frame_range", + label="Frame Range", + tooltip="Reset Comp start and end frame ranges", + default=True + ), + BoolDef( + "resolution", + label="Comp Resolution", + tooltip="Reset Comp resolution", + default=True + ), + BoolDef( + "instances", + label="Publish instances", + tooltip="Update all publish instance's folder and task to match " + "the new folder and task", + default=True + ), + ] + + dialog = AttributeDefinitionsDialog(definitions) + dialog.setWindowFlags( + dialog.windowFlags() | QtCore.Qt.WindowStaysOnTopHint + ) + dialog.setWindowTitle("Saving to different context.") + dialog.setStyleSheet(load_stylesheet()) + if not dialog.exec_(): + return None + + options = dialog.get_values() + folder_entity = get_current_folder_entity() + if options["frame_range"]: + set_current_context_framerange(folder_entity) + + if options["fps"]: + set_current_context_fps(folder_entity) + + if options["resolution"]: + set_current_context_resolution(folder_entity) + + if options["instances"]: + update_content_on_context_change() + + dialog.deleteLater() diff --git a/client/ayon_core/hosts/fusion/api/pipeline.py b/client/ayon_core/hosts/fusion/api/pipeline.py index 50157cfae6..2d1073ec7d 100644 --- a/client/ayon_core/hosts/fusion/api/pipeline.py +++ b/client/ayon_core/hosts/fusion/api/pipeline.py @@ -5,6 +5,7 @@ import os import sys import logging import contextlib +from pathlib import Path import pyblish.api from qtpy import QtCore @@ -28,8 +29,8 @@ from ayon_core.tools.utils import host_tools from .lib import ( get_current_comp, - comp_lock_and_undo_chunk, - validate_comp_prefs + validate_comp_prefs, + prompt_reset_context ) log = Logger.get_logger(__name__) @@ -41,6 +42,9 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") +# Track whether the workfile tool is about to save +_about_to_save = False + class FusionLogHandler(logging.Handler): # Keep a reference to fusion's Print function (Remote Object) @@ -104,8 +108,10 @@ class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): # Register events register_event_callback("open", on_after_open) + register_event_callback("workfile.save.before", before_workfile_save) register_event_callback("save", on_save) register_event_callback("new", on_new) + register_event_callback("taskChanged", on_task_changed) # region workfile io api def has_unsaved_changes(self): @@ -169,6 +175,19 @@ def on_save(event): comp = event["sender"] validate_comp_prefs(comp) + # We are now starting the actual save directly + global _about_to_save + _about_to_save = False + + +def on_task_changed(): + global _about_to_save + print(f"Task changed: {_about_to_save}") + # TODO: Only do this if not headless + if _about_to_save: + # Let's prompt the user to update the context settings or not + prompt_reset_context() + def on_after_open(event): comp = event["sender"] @@ -202,6 +221,28 @@ def on_after_open(event): dialog.setStyleSheet(load_stylesheet()) +def before_workfile_save(event): + # Due to Fusion's external python process design we can't really + # detect whether the current Fusion environment matches the one the artists + # expects it to be. 
For example, our pipeline python process might + # have been shut down, and restarted - which will restart it to the + # environment Fusion started with; not necessarily where the artist + # is currently working. + # The `_about_to_save` var is used to detect context changes when + # saving into another asset. If we keep it False it will be ignored + # as context change. As such, before we change tasks we will only + # consider it the current filepath is within the currently known + # AVALON_WORKDIR. This way we avoid false positives of thinking it's + # saving to another context and instead sometimes just have false negatives + # where we fail to show the "Update on task change" prompt. + comp = get_current_comp() + filepath = comp.GetAttrs()["COMPS_FileName"] + workdir = os.environ.get("AYON_WORKDIR") + if Path(workdir) in Path(filepath).parents: + global _about_to_save + _about_to_save = True + + def ls(): """List containers from active Fusion scene @@ -338,7 +379,6 @@ class FusionEventHandler(QtCore.QObject): >>> handler = FusionEventHandler(parent=window) >>> handler.start() - """ ACTION_IDS = [ "Comp_Save", diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py index 10b1c9c45d..1064d0a83a 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py @@ -7,7 +7,7 @@ from ayon_core.hosts.fusion import ( FUSION_VERSIONS_DICT, get_fusion_version, ) -from ayon_core.lib.applications import ( +from ayon_applications import ( PreLaunchHook, LaunchTypes, ApplicationLaunchFailed, diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py index 5e97ae3de1..ef084b0483 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import ( +from ayon_applications import ( PreLaunchHook, LaunchTypes, ApplicationLaunchFailed, diff --git a/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py b/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py index a9db39e24e..ab12078c43 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py @@ -3,7 +3,7 @@ import subprocess import platform import uuid -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class InstallPySideToFusion(PreLaunchHook): diff --git a/client/ayon_core/hosts/fusion/plugins/create/create_saver.py b/client/ayon_core/hosts/fusion/plugins/create/create_saver.py index b6cda1f302..20c7b99851 100644 --- a/client/ayon_core/hosts/fusion/plugins/create/create_saver.py +++ b/client/ayon_core/hosts/fusion/plugins/create/create_saver.py @@ -1,6 +1,11 @@ -from ayon_core.lib import EnumDef +from ayon_core.lib import ( + UILabelDef, + NumberDef, + EnumDef +) from ayon_core.hosts.fusion.api.plugin import GenericCreateSaver +from ayon_core.hosts.fusion.api.lib import get_current_comp class CreateSaver(GenericCreateSaver): @@ -45,6 +50,7 @@ class CreateSaver(GenericCreateSaver): self._get_reviewable_bool(), self._get_frame_range_enum(), self._get_image_format_enum(), + *self._get_custom_frame_range_attribute_defs() ] return attr_defs @@ -53,6 +59,7 @@ class CreateSaver(GenericCreateSaver): "current_folder": "Current Folder context", 
"render_range": "From render in/out", "comp_range": "From composition timeline", + "custom_range": "Custom frame range", } return EnumDef( @@ -61,3 +68,82 @@ class CreateSaver(GenericCreateSaver): label="Frame range source", default=self.default_frame_range_option ) + + @staticmethod + def _get_custom_frame_range_attribute_defs() -> list: + + # Define custom frame range defaults based on current comp + # timeline settings (if a comp is currently open) + comp = get_current_comp() + if comp is not None: + attrs = comp.GetAttrs() + frame_defaults = { + "frameStart": int(attrs["COMPN_GlobalStart"]), + "frameEnd": int(attrs["COMPN_GlobalEnd"]), + "handleStart": int( + attrs["COMPN_RenderStart"] - attrs["COMPN_GlobalStart"] + ), + "handleEnd": int( + attrs["COMPN_GlobalEnd"] - attrs["COMPN_RenderEnd"] + ), + } + else: + frame_defaults = { + "frameStart": 1001, + "frameEnd": 1100, + "handleStart": 0, + "handleEnd": 0 + } + + return [ + UILabelDef( + label="
Custom Frame Range
" + "only used with 'Custom frame range' source" + ), + NumberDef( + "custom_frameStart", + label="Frame Start", + default=frame_defaults["frameStart"], + minimum=0, + decimals=0, + tooltip=( + "Set the start frame for the export.\n" + "Only used if frame range source is 'Custom frame range'." + ) + ), + NumberDef( + "custom_frameEnd", + label="Frame End", + default=frame_defaults["frameEnd"], + minimum=0, + decimals=0, + tooltip=( + "Set the end frame for the export.\n" + "Only used if frame range source is 'Custom frame range'." + ) + ), + NumberDef( + "custom_handleStart", + label="Handle Start", + default=frame_defaults["handleStart"], + minimum=0, + decimals=0, + tooltip=( + "Set the start handles for the export, this will be " + "added before the start frame.\n" + "Only used if frame range source is 'Custom frame range'." + ) + ), + NumberDef( + "custom_handleEnd", + label="Handle End", + default=frame_defaults["handleEnd"], + minimum=0, + decimals=0, + tooltip=( + "Set the end handles for the export, this will be added " + "after the end frame.\n" + "Only used if frame range source is 'Custom frame range'." + ) + ) + ] diff --git a/client/ayon_core/hosts/fusion/plugins/publish/collect_instances.py b/client/ayon_core/hosts/fusion/plugins/publish/collect_instances.py index 51d7e68fb6..921c282877 100644 --- a/client/ayon_core/hosts/fusion/plugins/publish/collect_instances.py +++ b/client/ayon_core/hosts/fusion/plugins/publish/collect_instances.py @@ -57,6 +57,14 @@ class CollectInstanceData(pyblish.api.InstancePlugin): start_with_handle = comp_start end_with_handle = comp_end + if frame_range_source == "custom_range": + start = int(instance.data["custom_frameStart"]) + end = int(instance.data["custom_frameEnd"]) + handle_start = int(instance.data["custom_handleStart"]) + handle_end = int(instance.data["custom_handleEnd"]) + start_with_handle = start - handle_start + end_with_handle = end + handle_end + frame = instance.data["creator_attributes"].get("frame") # explicitly publishing only single frame if frame is not None: diff --git a/client/ayon_core/hosts/harmony/api/lib.py b/client/ayon_core/hosts/harmony/api/lib.py index 3c833c7b69..f9980cb65e 100644 --- a/client/ayon_core/hosts/harmony/api/lib.py +++ b/client/ayon_core/hosts/harmony/api/lib.py @@ -568,7 +568,7 @@ def save_scene(): """Save the Harmony scene safely. The built-in (to Avalon) background zip and moving of the Harmony scene - folder, interfers with server/client communication by sending two requests + folder, interferes with server/client communication by sending two requests at the same time. This only happens when sending "scene.saveAll()". This method prevents this double request and safely saves the scene. 
diff --git a/client/ayon_core/hosts/harmony/api/pipeline.py b/client/ayon_core/hosts/harmony/api/pipeline.py index d842ccd414..1e3ea0ba21 100644 --- a/client/ayon_core/hosts/harmony/api/pipeline.py +++ b/client/ayon_core/hosts/harmony/api/pipeline.py @@ -13,7 +13,7 @@ from ayon_core.pipeline import ( AVALON_CONTAINER_ID, ) from ayon_core.pipeline.load import get_outdated_containers -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.hosts.harmony import HARMONY_ADDON_ROOT import ayon_core.hosts.harmony.api as harmony @@ -50,7 +50,7 @@ def get_current_context_settings(): """ - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() folder_attributes = folder_entity["attrib"] fps = folder_attributes.get("fps") diff --git a/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py b/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py index bbad14084a..4d38cd09b3 100644 --- a/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py +++ b/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py @@ -6,10 +6,7 @@ from ayon_core.lib import ( get_ayon_launcher_args, is_using_ayon_console, ) -from ayon_core.lib.applications import ( - PreLaunchHook, - LaunchTypes, -) +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.harmony import get_launch_script_path diff --git a/client/ayon_core/hosts/harmony/plugins/create/create_farm_render.py b/client/ayon_core/hosts/harmony/plugins/create/create_farm_render.py index 16c403de6a..3039d56ead 100644 --- a/client/ayon_core/hosts/harmony/plugins/create/create_farm_render.py +++ b/client/ayon_core/hosts/harmony/plugins/create/create_farm_render.py @@ -21,12 +21,12 @@ class CreateFarmRender(plugin.Creator): path = "render/{0}/{0}.".format(node.split("/")[-1]) harmony.send( { - "function": f"PypeHarmony.Creators.CreateRender.create", + "function": "PypeHarmony.Creators.CreateRender.create", "args": [node, path] }) harmony.send( { - "function": f"PypeHarmony.color", + "function": "PypeHarmony.color", "args": [[0.9, 0.75, 0.3, 1.0]] } ) diff --git a/client/ayon_core/hosts/harmony/plugins/publish/collect_audio.py b/client/ayon_core/hosts/harmony/plugins/publish/collect_audio.py index 40b4107a62..cc959a23b9 100644 --- a/client/ayon_core/hosts/harmony/plugins/publish/collect_audio.py +++ b/client/ayon_core/hosts/harmony/plugins/publish/collect_audio.py @@ -1,8 +1,8 @@ import os -import pyblish.api import pyblish.api + class CollectAudio(pyblish.api.InstancePlugin): """ Collect relative path for audio file to instance. 
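The Harmony publish plugins touched here call back into the running host through `harmony.send`, which is why the removed `f` prefixes above (and in the `collect_scene` plugin below) were redundant: the function names contain no interpolation. A minimal usage sketch, assuming an open Harmony session with the bundled `PypeHarmony` helpers loaded:

import ayon_core.hosts.harmony.api as harmony

# "function" is a plain string, so no f-string prefix is needed; the
# response dict carries the JavaScript return value under "result".
version = harmony.send(
    {"function": "PypeHarmony.getVersion", "args": []}
)["result"]
print("Harmony version: {}.{}".format(version[0], version[1]))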
diff --git a/client/ayon_core/hosts/harmony/plugins/publish/collect_scene.py b/client/ayon_core/hosts/harmony/plugins/publish/collect_scene.py index a60e44b69b..bc2ccca1be 100644 --- a/client/ayon_core/hosts/harmony/plugins/publish/collect_scene.py +++ b/client/ayon_core/hosts/harmony/plugins/publish/collect_scene.py @@ -17,7 +17,7 @@ class CollectScene(pyblish.api.ContextPlugin): """Plugin entry point.""" result = harmony.send( { - f"function": "PypeHarmony.getSceneSettings", + "function": "PypeHarmony.getSceneSettings", "args": []} )["result"] @@ -62,7 +62,7 @@ class CollectScene(pyblish.api.ContextPlugin): result = harmony.send( { - f"function": "PypeHarmony.getVersion", + "function": "PypeHarmony.getVersion", "args": []} )["result"] context.data["harmonyVersion"] = "{}.{}".format(result[0], result[1]) diff --git a/client/ayon_core/hosts/hiero/api/events.py b/client/ayon_core/hosts/hiero/api/events.py index 0e509747d5..304605e24e 100644 --- a/client/ayon_core/hosts/hiero/api/events.py +++ b/client/ayon_core/hosts/hiero/api/events.py @@ -1,10 +1,12 @@ import os + import hiero.core.events + from ayon_core.lib import Logger, register_event_callback + from .lib import ( sync_avalon_data_to_workfile, launch_workfiles_app, - selection_changed_timeline, before_project_save, ) from .tags import add_tags_to_workfile diff --git a/client/ayon_core/hosts/hiero/api/lib.py b/client/ayon_core/hosts/hiero/api/lib.py index 8e08e8cbf3..8682ff7780 100644 --- a/client/ayon_core/hosts/hiero/api/lib.py +++ b/client/ayon_core/hosts/hiero/api/lib.py @@ -166,7 +166,7 @@ def get_current_track(sequence, name, audio=False): Creates new if none is found. Args: - sequence (hiero.core.Sequence): hiero sequene object + sequence (hiero.core.Sequence): hiero sequence object name (str): name of track we want to return audio (bool)[optional]: switch to AudioTrack @@ -248,8 +248,12 @@ def get_track_items( # collect all available active sequence track items if not return_list: sequence = get_current_sequence(name=sequence_name) - # get all available tracks from sequence - tracks = list(sequence.audioTracks()) + list(sequence.videoTracks()) + tracks = [] + if sequence is not None: + # get all available tracks from sequence + tracks.extend(sequence.audioTracks()) + tracks.extend(sequence.videoTracks()) + # loop all tracks for track in tracks: if check_locked and track.isLocked(): @@ -846,8 +850,8 @@ def create_nuke_workfile_clips(nuke_workfiles, seq=None): [{ 'path': 'P:/Jakub_testy_pipeline/test_v01.nk', 'name': 'test', - 'handleStart': 15, # added asymetrically to handles - 'handleEnd': 10, # added asymetrically to handles + 'handleStart': 15, # added asymmetrically to handles + 'handleEnd': 10, # added asymmetrically to handles "clipIn": 16, "frameStart": 991, "frameEnd": 1023, @@ -1192,7 +1196,7 @@ def get_sequence_pattern_and_padding(file): Return: string: any matching sequence pattern - int: padding of sequnce numbering + int: padding of sequence numbering """ foundall = re.findall( r"(#+)|(%\d+d)|(?<=[^a-zA-Z0-9])(\d+)(?=\.\w+$)", file) diff --git a/client/ayon_core/hosts/hiero/api/otio/hiero_import.py b/client/ayon_core/hosts/hiero/api/otio/hiero_import.py index 257c434011..29ff7f7325 100644 --- a/client/ayon_core/hosts/hiero/api/otio/hiero_import.py +++ b/client/ayon_core/hosts/hiero/api/otio/hiero_import.py @@ -90,7 +90,7 @@ def apply_transition(otio_track, otio_item, track): if isinstance(track, hiero.core.AudioTrack): kind = 'Audio' - # Gather TrackItems involved in trasition + # Gather TrackItems involved in 
transition item_in, item_out = get_neighboring_trackitems( otio_item, otio_track, @@ -101,7 +101,7 @@ def apply_transition(otio_track, otio_item, track): if transition_type == 'dissolve': transition_func = getattr( hiero.core.Transition, - 'create{kind}DissolveTransition'.format(kind=kind) + "create{kind}DissolveTransition".format(kind=kind) ) try: @@ -109,7 +109,7 @@ def apply_transition(otio_track, otio_item, track): item_in, item_out, otio_item.in_offset.value, - otio_item.out_offset.value + otio_item.out_offset.value, ) # Catch error raised if transition is bigger than TrackItem source @@ -134,7 +134,7 @@ def apply_transition(otio_track, otio_item, track): transition = transition_func( item_out, - otio_item.out_offset.value + otio_item.out_offset.value, ) elif transition_type == 'fade_out': @@ -183,9 +183,7 @@ def prep_url(url_in): def create_offline_mediasource(otio_clip, path=None): global _otio_old - hiero_rate = hiero.core.TimeBase( - otio_clip.source_range.start_time.rate - ) + hiero_rate = hiero.core.TimeBase(otio_clip.source_range.start_time.rate) try: legal_media_refs = ( @@ -212,7 +210,7 @@ def create_offline_mediasource(otio_clip, path=None): source_range.start_time.value, source_range.duration.value, hiero_rate, - source_range.start_time.value + source_range.start_time.value, ) return media @@ -385,7 +383,8 @@ def create_trackitem(playhead, track, otio_clip, clip): # Only reverse effect can be applied here if abs(time_scalar) == 1.: trackitem.setPlaybackSpeed( - trackitem.playbackSpeed() * time_scalar) + trackitem.playbackSpeed() * time_scalar + ) elif isinstance(effect, otio.schema.FreezeFrame): # For freeze frame, playback speed must be set after range @@ -397,28 +396,21 @@ def create_trackitem(playhead, track, otio_clip, clip): source_in = source_range.end_time_inclusive().value timeline_in = playhead + source_out - timeline_out = ( - timeline_in + - source_range.duration.value - ) - 1 + timeline_out = (timeline_in + source_range.duration.value) - 1 else: # Normal playback speed source_in = source_range.start_time.value source_out = source_range.end_time_inclusive().value timeline_in = playhead - timeline_out = ( - timeline_in + - source_range.duration.value - ) - 1 + timeline_out = (timeline_in + source_range.duration.value) - 1 # Set source and timeline in/out points trackitem.setTimes( timeline_in, timeline_out, source_in, - source_out - + source_out, ) # Apply playback speed for freeze frames @@ -435,7 +427,8 @@ def create_trackitem(playhead, track, otio_clip, clip): def build_sequence( - otio_timeline, project=None, sequence=None, track_kind=None): + otio_timeline, project=None, sequence=None, track_kind=None +): if project is None: if sequence: project = sequence.project() @@ -509,10 +502,7 @@ def build_sequence( # Create TrackItem trackitem = create_trackitem( - playhead, - track, - otio_clip, - clip + playhead, track, otio_clip, clip ) # Add markers diff --git a/client/ayon_core/hosts/hiero/api/otio/utils.py b/client/ayon_core/hosts/hiero/api/otio/utils.py index 4c5d46bd51..f7cb58f1e8 100644 --- a/client/ayon_core/hosts/hiero/api/otio/utils.py +++ b/client/ayon_core/hosts/hiero/api/otio/utils.py @@ -25,7 +25,7 @@ def get_reformated_path(path, padded=True): path (str): path url or simple file name Returns: - type: string with reformated path + type: string with reformatted path Example: get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr diff --git a/client/ayon_core/hosts/hiero/api/plugin.py b/client/ayon_core/hosts/hiero/api/plugin.py index 
4878368716..1353673b31 100644 --- a/client/ayon_core/hosts/hiero/api/plugin.py +++ b/client/ayon_core/hosts/hiero/api/plugin.py @@ -449,7 +449,6 @@ class ClipLoader: repr = self.context["representation"] repr_cntx = repr["context"] folder_path = self.context["folder"]["path"] - folder_name = self.context["folder"]["name"] product_name = self.context["product"]["name"] representation = repr["name"] self.data["clip_name"] = self.clip_name_template.format(**repr_cntx) @@ -906,16 +905,16 @@ class PublishClip: "hierarchyData": hierarchy_formatting_data, "productName": self.product_name, "productType": self.product_type, - "families": [self.product_type, self.data["family"]] + "families": [self.product_type, self.data["productType"]] } - def _convert_to_entity(self, type, template): + def _convert_to_entity(self, src_type, template): """ Converting input key to key with type. """ # convert to entity type - entity_type = self.types.get(type, None) + folder_type = self.types.get(src_type, None) - assert entity_type, "Missing entity type for `{}`".format( - type + assert folder_type, "Missing folder type for `{}`".format( + src_type ) # first collect formatting data to use for formatting template @@ -926,7 +925,7 @@ class PublishClip: formatting_data[_k] = value return { - "entity_type": entity_type, + "folder_type": folder_type, "entity_name": template.format( **formatting_data ) diff --git a/client/ayon_core/hosts/hiero/api/startup/Python/Startup/SpreadsheetExport.py b/client/ayon_core/hosts/hiero/api/startup/Python/Startup/SpreadsheetExport.py index 9c919e7cb4..6a8057ec1e 100644 --- a/client/ayon_core/hosts/hiero/api/startup/Python/Startup/SpreadsheetExport.py +++ b/client/ayon_core/hosts/hiero/api/startup/Python/Startup/SpreadsheetExport.py @@ -3,9 +3,11 @@ # Note: This only prints the text data that is visible in the active Spreadsheet View. 
# If you've filtered text, only the visible text will be printed to the CSV file # Usage: Copy to ~/.hiero/Python/StartupUI +import os +import csv + import hiero.core.events import hiero.ui -import os, csv try: from PySide.QtGui import * from PySide.QtCore import * diff --git a/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/PimpMySpreadsheet.py b/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/PimpMySpreadsheet.py index b8dfb07b47..fcfa24310e 100644 --- a/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/PimpMySpreadsheet.py +++ b/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/PimpMySpreadsheet.py @@ -641,7 +641,7 @@ def _setStatus(self, status): global gStatusTags # Get a valid Tag object from the Global list of statuses - if not status in gStatusTags.keys(): + if status not in gStatusTags.keys(): print("Status requested was not a valid Status string.") return diff --git a/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/otioimporter/OTIOImport.py b/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/otioimporter/OTIOImport.py index 17c044f3ec..d2fe608d99 100644 --- a/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/otioimporter/OTIOImport.py +++ b/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/otioimporter/OTIOImport.py @@ -90,7 +90,7 @@ def apply_transition(otio_track, otio_item, track): kind = "Audio" try: - # Gather TrackItems involved in trasition + # Gather TrackItems involved in transition item_in, item_out = get_neighboring_trackitems( otio_item, otio_track, @@ -101,14 +101,14 @@ def apply_transition(otio_track, otio_item, track): if transition_type == "dissolve": transition_func = getattr( hiero.core.Transition, - 'create{kind}DissolveTransition'.format(kind=kind) + "create{kind}DissolveTransition".format(kind=kind) ) transition = transition_func( item_in, item_out, otio_item.in_offset.value, - otio_item.out_offset.value + otio_item.out_offset.value, ) elif transition_type == "fade_in": @@ -116,20 +116,14 @@ def apply_transition(otio_track, otio_item, track): hiero.core.Transition, 'create{kind}FadeInTransition'.format(kind=kind) ) - transition = transition_func( - item_out, - otio_item.out_offset.value - ) + transition = transition_func(item_out, otio_item.out_offset.value) elif transition_type == "fade_out": transition_func = getattr( hiero.core.Transition, - 'create{kind}FadeOutTransition'.format(kind=kind) - ) - transition = transition_func( - item_in, - otio_item.in_offset.value + "create{kind}FadeOutTransition".format(kind=kind) ) + transition = transition_func(item_in, otio_item.in_offset.value) else: # Unknown transition @@ -138,11 +132,10 @@ def apply_transition(otio_track, otio_item, track): # Apply transition to track track.addTransition(transition) - except Exception, e: + except Exception as e: sys.stderr.write( 'Unable to apply transition "{t}": "{e}"\n'.format( - t=otio_item, - e=e + t=otio_item, e=e ) ) @@ -153,18 +146,14 @@ def prep_url(url_in): if url.startswith("file://localhost/"): return url.replace("file://localhost/", "") - url = '{url}'.format( - sep=url.startswith(os.sep) and "" or os.sep, - url=url.startswith(os.sep) and url[1:] or url - ) + if url.startswith(os.sep): + url = url[1:] return url def create_offline_mediasource(otio_clip, path=None): - hiero_rate = hiero.core.TimeBase( - otio_clip.source_range.start_time.rate - ) + hiero_rate = hiero.core.TimeBase(otio_clip.source_range.start_time.rate) if isinstance(otio_clip.media_reference, otio.schema.ExternalReference): 
source_range = otio_clip.available_range() @@ -180,7 +169,7 @@ def create_offline_mediasource(otio_clip, path=None): source_range.start_time.value, source_range.duration.value, hiero_rate, - source_range.start_time.value + source_range.start_time.value, ) return media @@ -203,7 +192,7 @@ marker_color_map = { "MAGENTA": "Magenta", "BLACK": "Blue", "WHITE": "Green", - "MINT": "Cyan" + "MINT": "Cyan", } @@ -254,12 +243,6 @@ def add_markers(otio_item, hiero_item, tagsbin): if _tag is None: _tag = hiero.core.Tag(marker_color_map[marker.color]) - start = marker.marked_range.start_time.value - end = ( - marker.marked_range.start_time.value + - marker.marked_range.duration.value - ) - tag = hiero_item.addTag(_tag) tag.setName(marker.name or marker_color_map[marker_color]) @@ -275,12 +258,12 @@ def create_track(otio_track, tracknum, track_kind): # Create a Track if otio_track.kind == otio.schema.TrackKind.Video: track = hiero.core.VideoTrack( - otio_track.name or 'Video{n}'.format(n=tracknum) + otio_track.name or "Video{n}".format(n=tracknum) ) else: track = hiero.core.AudioTrack( - otio_track.name or 'Audio{n}'.format(n=tracknum) + otio_track.name or "Audio{n}".format(n=tracknum) ) return track @@ -315,34 +298,25 @@ def create_trackitem(playhead, track, otio_clip, clip, tagsbin): for effect in otio_clip.effects: if isinstance(effect, otio.schema.LinearTimeWarp): trackitem.setPlaybackSpeed( - trackitem.playbackSpeed() * - effect.time_scalar + trackitem.playbackSpeed() * effect.time_scalar ) # If reverse playback speed swap source in and out if trackitem.playbackSpeed() < 0: source_out = source_range.start_time.value source_in = ( - source_range.start_time.value + - source_range.duration.value + source_range.start_time.value + source_range.duration.value ) - 1 timeline_in = playhead + source_out - timeline_out = ( - timeline_in + - source_range.duration.value - ) - 1 + timeline_out = (timeline_in + source_range.duration.value) - 1 else: # Normal playback speed source_in = source_range.start_time.value source_out = ( - source_range.start_time.value + - source_range.duration.value + source_range.start_time.value + source_range.duration.value ) - 1 timeline_in = playhead - timeline_out = ( - timeline_in + - source_range.duration.value - ) - 1 + timeline_out = (timeline_in + source_range.duration.value) - 1 # Set source and timeline in/out points trackitem.setSourceIn(source_in) @@ -357,7 +331,8 @@ def create_trackitem(playhead, track, otio_clip, clip, tagsbin): def build_sequence( - otio_timeline, project=None, sequence=None, track_kind=None): + otio_timeline, project=None, sequence=None, track_kind=None +): if project is None: if sequence: @@ -414,8 +389,7 @@ def build_sequence( if isinstance(otio_clip, otio.schema.Stack): bar = hiero.ui.mainWindow().statusBar() bar.showMessage( - "Nested sequences are created separately.", - timeout=3000 + "Nested sequences are created separately.", timeout=3000 ) build_sequence(otio_clip, project, otio_track.kind) @@ -428,11 +402,7 @@ def build_sequence( # Create TrackItem trackitem = create_trackitem( - playhead, - track, - otio_clip, - clip, - tagsbin + playhead, track, otio_clip, clip, tagsbin ) # Add trackitem to track diff --git a/client/ayon_core/hosts/hiero/api/tags.py b/client/ayon_core/hosts/hiero/api/tags.py index 32620aa2f5..5abfee75d0 100644 --- a/client/ayon_core/hosts/hiero/api/tags.py +++ b/client/ayon_core/hosts/hiero/api/tags.py @@ -89,7 +89,7 @@ def update_tag(tag, data): # set all data metadata to tag metadata for _k, _v in data_mtd.items(): value 
= str(_v) - if type(_v) == dict: + if isinstance(_v, dict): value = json.dumps(_v) # set the value diff --git a/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py b/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py index 62e7041286..2985a81317 100644 --- a/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py +++ b/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py @@ -166,7 +166,7 @@ class CreateShotClip(phiero.Creator): "type": "QCheckBox", "label": "Source resolution", "target": "tag", - "toolTip": "Is resloution taken from timeline or source?", # noqa + "toolTip": "Is resolution taken from timeline or source?", # noqa "order": 4}, } }, @@ -211,7 +211,7 @@ class CreateShotClip(phiero.Creator): presets = deepcopy(self.presets) gui_inputs = deepcopy(self.gui_inputs) - # get key pares from presets and match it on ui inputs + # get key pairs from presets and match it on ui inputs for k, v in gui_inputs.items(): if v["type"] in ("dict", "section"): # nested dictionary (only one level allowed diff --git a/client/ayon_core/hosts/hiero/plugins/publish/collect_clip_effects.py b/client/ayon_core/hosts/hiero/plugins/publish/collect_clip_effects.py index 32b4864022..bfc63f2551 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/collect_clip_effects.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/collect_clip_effects.py @@ -1,5 +1,5 @@ -from itertools import product import re + import pyblish.api diff --git a/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py b/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py index d921f37934..67e1f18cbf 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py @@ -43,7 +43,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): tracks_effect_items = self.collect_sub_track_items(all_tracks) context.data["tracksEffectItems"] = tracks_effect_items - # process all sellected timeline track items + # process all selected timeline track items for track_item in selected_timeline_items: data = {} clip_name = track_item.name() @@ -62,7 +62,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): }: continue - # get clips subtracks and anotations + # get clips subtracks and annotations annotations = self.clip_annotations(source_clip) subtracks = self.clip_subtrack(track_item) self.log.debug("Annotations: {}".format(annotations)) @@ -84,8 +84,13 @@ class PrecollectInstances(pyblish.api.ContextPlugin): k: v for k, v in tag_data.items() if k not in ("id", "applieswhole", "label") }) + # Backward compatibility fix of 'entity_type' > 'folder_type' + if "parents" in data: + for parent in data["parents"]: + if "entity_type" in parent: + parent["folder_type"] = parent.pop("entity_type") - asset, asset_name = self._get_folder_data(tag_data) + folder_path, folder_name = self._get_folder_data(tag_data) product_name = tag_data.get("productName") if product_name is None: @@ -93,12 +98,6 @@ class PrecollectInstances(pyblish.api.ContextPlugin): families = [str(f) for f in tag_data["families"]] - # form label - label = "{} -".format(asset) - if asset_name != clip_name: - label += " ({})".format(clip_name) - label += " {}".format(product_name) - # TODO: remove backward compatibility product_name = tag_data.get("productName") if product_name is None: @@ -108,7 +107,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): # backward compatibility: product_name should not be missing if not 
product_name: self.log.error( - "Product name is not defined for: {}".format(asset)) + "Product name is not defined for: {}".format(folder_path)) # TODO: remove backward compatibility product_type = tag_data.get("productType") @@ -119,15 +118,21 @@ class PrecollectInstances(pyblish.api.ContextPlugin): # backward compatibility: product_type should not be missing if not product_type: self.log.error( - "Product type is not defined for: {}".format(asset)) + "Product type is not defined for: {}".format(folder_path)) + + # form label + label = "{} -".format(folder_path) + if folder_name != clip_name: + label += " ({})".format(clip_name) + label += " {}".format(product_name) data.update({ - "name": "{}_{}".format(asset, product_name), + "name": "{}_{}".format(folder_path, product_name), "label": label, - "folderPath": asset, - "asset_name": asset_name, "productName": product_name, "productType": product_type, + "folderPath": folder_path, + "asset_name": folder_name, "item": track_item, "families": families, "publish": tag_data["publish"], @@ -217,19 +222,19 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if not hierarchy_data: return - asset = data["folderPath"] - asset_name = data["asset_name"] + folder_path = data["folderPath"] + folder_name = data["asset_name"] product_type = "shot" # form label - label = "{} -".format(asset) - if asset_name != clip_name: + label = "{} -".format(folder_path) + if folder_name != clip_name: label += " ({}) ".format(clip_name) label += " {}".format(product_name) data.update({ - "name": "{}_{}".format(asset, product_name), + "name": "{}_{}".format(folder_path, product_name), "label": label, "productName": product_name, "productType": product_type, @@ -276,19 +281,19 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if not self.test_any_audio(item): return - asset = data["folderPath"] + folder_path = data["folderPath"] asset_name = data["asset_name"] product_type = "audio" # form label - label = "{} -".format(asset) + label = "{} -".format(folder_path) if asset_name != clip_name: label += " ({}) ".format(clip_name) label += " {}".format(product_name) data.update({ - "name": "{}_{}".format(asset, product_name), + "name": "{}_{}".format(folder_path, product_name), "label": label, "productName": product_name, "productType": product_type, @@ -378,12 +383,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin): # collect all subtrack items sub_track_items = {} for track in tracks: - items = track.items() - - effet_items = track.subTrackItems() + effect_items = track.subTrackItems() # skip if no clips on track > need track with effect only - if not effet_items: + if not effect_items: continue # skip all disabled tracks @@ -391,7 +394,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): continue track_index = track.trackIndex() - _sub_track_items = phiero.flatten(effet_items) + _sub_track_items = phiero.flatten(effect_items) _sub_track_items = list(_sub_track_items) # continue only if any subtrack items are collected @@ -439,10 +442,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin): for item in subTrackItems: if "TimeWarp" in item.name(): continue - # avoid all anotation + # avoid all annotation if isinstance(item, hiero.core.Annotation): continue - # # avoid all not anaibled + # avoid all disabled if not item.isEnabled(): continue subtracks.append(item) diff --git a/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py b/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py index 8df6cd4261..0b6b34ea6c 100644 ---
a/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py @@ -17,8 +17,8 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.491 def process(self, context): - asset = context.data["folderPath"] - asset_name = asset.split("/")[-1] + folder_path = context.data["folderPath"] + folder_name = folder_path.split("/")[-1] active_timeline = hiero.ui.activeSequence() project = active_timeline.project() @@ -62,12 +62,12 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): product_type = "workfile" instance_data = { "label": "{} - {}Main".format( - asset, product_type), - "name": "{}_{}".format(asset_name, product_type), - "folderPath": context.data["folderPath"], + folder_path, product_type), + "name": "{}_{}".format(folder_name, product_type), + "folderPath": folder_path, # TODO use 'get_product_name' "productName": "{}{}Main".format( - asset_name, product_type.capitalize() + folder_name, product_type.capitalize() ), "item": project, "productType": product_type, diff --git a/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/precollect_retime.py b/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/precollect_retime.py index 297ffa8001..8503a0b6a7 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/precollect_retime.py +++ b/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/precollect_retime.py @@ -35,10 +35,6 @@ class PrecollectRetime(api.InstancePlugin): source_out = int(track_item.sourceOut()) speed = track_item.playbackSpeed() - # calculate available material before retime - available_in = int(track_item.handleInLength() * speed) - available_out = int(track_item.handleOutLength() * speed) - self.log.debug(( "_BEFORE: \n timeline_in: `{0}`,\n timeline_out: `{1}`, \n " "source_in: `{2}`,\n source_out: `{3}`,\n speed: `{4}`,\n " diff --git a/client/ayon_core/hosts/houdini/api/creator_node_shelves.py b/client/ayon_core/hosts/houdini/api/creator_node_shelves.py index 6e48cb375b..72c157f187 100644 --- a/client/ayon_core/hosts/houdini/api/creator_node_shelves.py +++ b/client/ayon_core/hosts/houdini/api/creator_node_shelves.py @@ -91,7 +91,7 @@ def create_interactive(creator_identifier, **kwargs): pane = stateutils.activePane(kwargs) if isinstance(pane, hou.NetworkEditor): pwd = pane.pwd() - project_name = context.get_current_project_name(), + project_name = context.get_current_project_name() folder_path = context.get_current_folder_path() task_name = context.get_current_task_name() folder_entity = ayon_api.get_folder_by_path( diff --git a/client/ayon_core/hosts/houdini/api/lib.py b/client/ayon_core/hosts/houdini/api/lib.py index 681052a44d..972b10ad1b 100644 --- a/client/ayon_core/hosts/houdini/api/lib.py +++ b/client/ayon_core/hosts/houdini/api/lib.py @@ -3,7 +3,6 @@ import sys import os import errno import re -import uuid import logging import json from contextlib import contextmanager @@ -23,7 +22,7 @@ from ayon_core.pipeline import ( ) from ayon_core.pipeline.create import CreateContext from ayon_core.pipeline.template_data import get_template_data -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.tools.utils import PopupUpdateKeys, SimplePopup from ayon_core.tools.utils.host_tools import get_tool_by_name @@ -40,88 +39,10 @@ def get_folder_fps(folder_entity=None): """Return current folder fps.""" if folder_entity is None: 
- folder_entity = get_current_project_folder(fields=["attrib.fps"]) + folder_entity = get_current_folder_entity(fields=["attrib.fps"]) return folder_entity["attrib"]["fps"] -def set_id(node, unique_id, overwrite=False): - exists = node.parm("id") - if not exists: - imprint(node, {"id": unique_id}) - - if not exists and overwrite: - node.setParm("id", unique_id) - - -def get_id(node): - """Get the `cbId` attribute of the given node. - - Args: - node (hou.Node): the name of the node to retrieve the attribute from - - Returns: - str: cbId attribute of the node. - - """ - - if node is not None: - return node.parm("id") - - -def generate_ids(nodes, folder_id=None): - """Returns new unique ids for the given nodes. - - Note: This does not assign the new ids, it only generates the values. - - To assign new ids using this method: - >>> nodes = ["a", "b", "c"] - >>> for node, id in generate_ids(nodes): - >>> set_id(node, id) - - To also override any existing values (and assign regenerated ids): - >>> nodes = ["a", "b", "c"] - >>> for node, id in generate_ids(nodes): - >>> set_id(node, id, overwrite=True) - - Args: - nodes (list): List of nodes. - folder_id (str): Folder id . Use current folder id if is ``None``. - - Returns: - list: A list of (node, id) tuples. - - """ - - if folder_id is None: - project_name = get_current_project_name() - folder_path = get_current_folder_path() - # Get folder id of current context folder - folder_entity = ayon_api.get_folder_by_path( - project_name, folder_path, fields={"id"} - ) - if not folder_entity: - raise ValueError("No current folder is set.") - - folder_id = folder_entity["id"] - - node_ids = [] - for node in nodes: - _, uid = str(uuid.uuid4()).rsplit("-", 1) - unique_id = "{}:{}".format(folder_id, uid) - node_ids.append((node, unique_id)) - - return node_ids - - -def get_id_required_nodes(): - - valid_types = ["geometry"] - nodes = {n for n in hou.node("/out").children() if - n.type().name() in valid_types} - - return list(nodes) - - def get_output_parameter(node): """Return the render output parameter of the given node @@ -322,7 +243,10 @@ def render_rop(ropnode): try: ropnode.render(verbose=verbose, # Allow Deadline to capture completion percentage - output_progress=verbose) + output_progress=verbose, + # Render only this node + # (do not render any of its dependencies) + ignore_inputs=True) except hou.Error as exc: # The hou.Error is not inherited from a Python Exception class, # so we explicitly capture the houdini error, otherwise pyblish @@ -526,7 +450,7 @@ def maintained_selection(): node.setSelected(on=True) -def reset_framerange(): +def reset_framerange(fps=True, frame_range=True): """Set frame range and FPS to current folder.""" project_name = get_current_project_name() @@ -535,29 +459,32 @@ def reset_framerange(): folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) folder_attributes = folder_entity["attrib"] - # Get FPS - fps = get_folder_fps(folder_entity) + # Set FPS + if fps: + fps = get_folder_fps(folder_entity) + print("Setting scene FPS to {}".format(int(fps))) + set_scene_fps(fps) - # Get Start and End Frames - frame_start = folder_attributes.get("frameStart") - frame_end = folder_attributes.get("frameEnd") + if frame_range: - if frame_start is None or frame_end is None: - log.warning("No edit information found for '{}'".format(folder_path)) - return + # Set Start and End Frames + frame_start = folder_attributes.get("frameStart") + frame_end = folder_attributes.get("frameEnd") - handle_start = 
folder_attributes.get("handleStart", 0) - handle_end = folder_attributes.get("handleEnd", 0) + if frame_start is None or frame_end is None: + log.warning("No edit information found for '%s'", folder_path) + return - frame_start -= int(handle_start) - frame_end += int(handle_end) + handle_start = folder_attributes.get("handleStart", 0) + handle_end = folder_attributes.get("handleEnd", 0) - # Set frame range and FPS - print("Setting scene FPS to {}".format(int(fps))) - set_scene_fps(fps) - hou.playbar.setFrameRange(frame_start, frame_end) - hou.playbar.setPlaybackRange(frame_start, frame_end) - hou.setFrame(frame_start) + frame_start -= int(handle_start) + frame_end += int(handle_end) + + # Set frame range and FPS + hou.playbar.setFrameRange(frame_start, frame_end) + hou.playbar.setPlaybackRange(frame_start, frame_end) + hou.setFrame(frame_start) def get_main_window(): @@ -814,7 +741,7 @@ def set_camera_resolution(camera, folder_entity=None): """Apply resolution to camera from folder entity of the publish""" if not folder_entity: - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() resolution = get_resolution_from_folder(folder_entity) @@ -1024,7 +951,7 @@ def self_publish(): Firstly, it gets the node and its dependencies. Then, it deactivates all other ROPs - And finaly, it triggers the publishing action. + And finally, it triggers the publishing action. """ result, comment = hou.ui.readInput( @@ -1072,3 +999,84 @@ def add_self_publish_button(node): template = node.parmTemplateGroup() template.insertBefore((0,), button_parm) node.setParmTemplateGroup(template) + + +def update_content_on_context_change(): + """Update all Creator instances to current asset""" + host = registered_host() + context = host.get_current_context() + + folder_path = context["folder_path"] + task = context["task_name"] + + create_context = CreateContext(host, reset=True) + + for instance in create_context.instances: + instance_folder_path = instance.get("folderPath") + if instance_folder_path and instance_folder_path != folder_path: + instance["folderPath"] = folder_path + instance_task = instance.get("task") + if instance_task and instance_task != task: + instance["task"] = task + + create_context.save_changes() + + +def prompt_reset_context(): + """Prompt the user what context settings to reset. + This prompt is used on saving to a different task to allow the scene to + get matched to the new context. + """ + # TODO: Cleanup this prototyped mess of imports and odd dialog + from ayon_core.tools.attribute_defs.dialog import ( + AttributeDefinitionsDialog + ) + from ayon_core.style import load_stylesheet + from ayon_core.lib import BoolDef, UILabelDef + + definitions = [ + UILabelDef( + label=( + "You are saving your workfile into a different folder or task." 
+ "\n\n" + "Would you like to update some settings to the new context?\n" + ) + ), + BoolDef( + "fps", + label="FPS", + tooltip="Reset workfile FPS", + default=True + ), + BoolDef( + "frame_range", + label="Frame Range", + tooltip="Reset workfile start and end frame ranges", + default=True + ), + BoolDef( + "instances", + label="Publish instances", + tooltip="Update all publish instance's folder and task to match " + "the new folder and task", + default=True + ), + ] + + dialog = AttributeDefinitionsDialog(definitions) + dialog.setWindowTitle("Saving to different context.") + dialog.setStyleSheet(load_stylesheet()) + if not dialog.exec_(): + return None + + options = dialog.get_values() + if options["fps"] or options["frame_range"]: + reset_framerange( + fps=options["fps"], + frame_range=options["frame_range"] + ) + + if options["instances"]: + update_content_on_context_change() + + dialog.deleteLater() diff --git a/client/ayon_core/hosts/houdini/api/pipeline.py b/client/ayon_core/hosts/houdini/api/pipeline.py index d5144200cf..4797cf36a0 100644 --- a/client/ayon_core/hosts/houdini/api/pipeline.py +++ b/client/ayon_core/hosts/houdini/api/pipeline.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Pipeline tools for OpenPype Houdini integration.""" import os -import sys import logging import hou # noqa @@ -39,6 +38,9 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") +# Track whether the workfile tool is about to save +_about_to_save = False + class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): name = "houdini" @@ -61,10 +63,12 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): log.info("Installing callbacks ... ") # register_event_callback("init", on_init) self._register_callbacks() + register_event_callback("workfile.save.before", before_workfile_save) register_event_callback("before.save", before_save) register_event_callback("save", on_save) register_event_callback("open", on_open) register_event_callback("new", on_new) + register_event_callback("taskChanged", on_task_changed) self._has_been_setup = True @@ -166,7 +170,7 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): if not op_ctx: op_ctx = self.create_context_node() - lib.imprint(op_ctx, data) + lib.imprint(op_ctx, data, update=True) def get_context_data(self): op_ctx = hou.node(CONTEXT_CONTAINER) @@ -287,6 +291,11 @@ def ls(): yield parse_container(container) +def before_workfile_save(event): + global _about_to_save + _about_to_save = True + + def before_save(): return lib.validate_fps() @@ -298,9 +307,16 @@ def on_save(): # update houdini vars lib.update_houdini_vars_context_dialog() - nodes = lib.get_id_required_nodes() - for node, new_id in lib.generate_ids(nodes): - lib.set_id(node, new_id, overwrite=False) + # We are now starting the actual save directly + global _about_to_save + _about_to_save = False + + +def on_task_changed(): + global _about_to_save + if not IS_HEADLESS and _about_to_save: + # Let's prompt the user to update the context settings or not + lib.prompt_reset_context() def _show_outdated_content_popup(): diff --git a/client/ayon_core/hosts/houdini/hooks/set_paths.py b/client/ayon_core/hosts/houdini/hooks/set_paths.py index 7eb346cc74..4b89ebe944 100644 --- a/client/ayon_core/hosts/houdini/hooks/set_paths.py +++ b/client/ayon_core/hosts/houdini/hooks/set_paths.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from 
ayon_applications import PreLaunchHook, LaunchTypes class SetPath(PreLaunchHook): diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_redshift_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_redshift_rop.py index ba0795a26e..1cd239e929 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_redshift_rop.py @@ -15,6 +15,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator): product_type = "redshift_rop" icon = "magic" ext = "exr" + multi_layered_mode = "No Multi-Layered EXR File" # Default to split export and render jobs split_render = True @@ -55,25 +56,36 @@ class CreateRedshiftROP(plugin.HoudiniCreator): # Set the linked rop to the Redshift ROP ipr_rop.parm("linked_rop").set(instance_node.path()) - ext = pre_create_data.get("image_format") - filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format( - renders_dir=hou.text.expandString("$HIP/pyblish/renders/"), - product_name=product_name, - fmt="${aov}.$F4.{ext}".format(aov="AOV", ext=ext) - ) + multi_layered_mode = pre_create_data.get("multi_layered_mode") ext_format_index = {"exr": 0, "tif": 1, "jpg": 2, "png": 3} + multilayer_mode_index = {"No Multi-Layered EXR File": "1", + "Full Multi-Layered EXR File": "2" } + + filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format( + renders_dir=hou.text.expandString("$HIP/pyblish/renders/"), + product_name=product_name, + fmt="$AOV.$F4.{ext}".format(ext=ext) + ) + + if multilayer_mode_index[multi_layered_mode] == "1": + multipart = False + + elif multilayer_mode_index[multi_layered_mode] == "2": + multipart = True parms = { # Render frame range "trange": 1, # Redshift ROP settings "RS_outputFileNamePrefix": filepath, - "RS_outputMultilayerMode": "1", # no multi-layered exr "RS_outputBeautyAOVSuffix": "beauty", "RS_outputFileFormat": ext_format_index[ext], } + if ext == "exr": + parms["RS_outputMultilayerMode"] = multilayer_mode_index[multi_layered_mode] + parms["RS_aovMultipart"] = multipart if self.selected_nodes: # set up the render camera from the selected node @@ -111,6 +123,11 @@ class CreateRedshiftROP(plugin.HoudiniCreator): image_format_enum = [ "exr", "tif", "jpg", "png", ] + multi_layered_mode = [ + "No Multi-Layered EXR File", + "Full Multi-Layered EXR File" + ] + return attrs + [ BoolDef("farm", @@ -122,5 +139,9 @@ class CreateRedshiftROP(plugin.HoudiniCreator): EnumDef("image_format", image_format_enum, default=self.ext, - label="Image Format Options") + label="Image Format Options"), + EnumDef("multi_layered_mode", + multi_layered_mode, + default=self.multi_layered_mode, + label="Multi-Layered EXR") ] diff --git a/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py b/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py index b813f82e2e..4cebd537bb 100644 --- a/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py +++ b/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py @@ -3,7 +3,7 @@ from ayon_core.hosts.houdini.api.lib import ( get_camera_from_container, set_camera_resolution ) -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity class SetCameraResolution(InventoryAction): @@ -19,7 +19,7 @@ class SetCameraResolution(InventoryAction): ) def process(self, containers): - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() for container 
in containers: node = container["node"] camera = get_camera_from_container(node) diff --git a/client/ayon_core/hosts/houdini/plugins/load/actions.py b/client/ayon_core/hosts/houdini/plugins/load/actions.py index c277005919..fbd89ab9c2 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/actions.py +++ b/client/ayon_core/hosts/houdini/plugins/load/actions.py @@ -76,8 +76,8 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): return # Include handles - start -= version_data.get("handleStart", 0) - end += version_data.get("handleEnd", 0) + start -= version_attributes.get("handleStart", 0) + end += version_attributes.get("handleEnd", 0) hou.playbar.setFrameRange(start, end) hou.playbar.setPlaybackRange(start, end) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py b/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py index a77d06d409..37657cbdff 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py @@ -59,7 +59,7 @@ class AbcLoader(load.LoaderPlugin): normal_node.setInput(0, unpack) - null = container.createNode("null", node_name="OUT".format(name)) + null = container.createNode("null", node_name="OUT") null.setInput(0, normal_node) # Ensure display flag is on the Alembic input node and not on the OUT diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_camera.py b/client/ayon_core/hosts/houdini/plugins/load/load_camera.py index 605e5724e6..7cb4542d0c 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_camera.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_camera.py @@ -167,6 +167,9 @@ class CameraLoader(load.LoaderPlugin): temp_camera.destroy() + def switch(self, container, context): + self.update(container, context) + def remove(self, container): node = container["node"] @@ -195,7 +198,6 @@ class CameraLoader(load.LoaderPlugin): def _match_maya_render_mask(self, camera): """Workaround to match Maya render mask in Houdini""" - # print("Setting match maya render mask ") parm = camera.parm("aperture") expression = parm.expression() expression = expression.replace("return ", "aperture = ") diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py b/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py new file mode 100644 index 0000000000..f107190f96 --- /dev/null +++ b/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py @@ -0,0 +1,129 @@ +import os +import re + +from ayon_core.pipeline import load +from ayon_core.hosts.houdini.api import pipeline + +import hou + + +class FilePathLoader(load.LoaderPlugin): + """Load a managed filepath to a null node. + + This is useful if there is no existing loader for a particular workflow + yet. A Houdini artist can load it with this generic filepath loader and + then reference the relevant Houdini parm to use the exact value. The + benefit is that this filepath will be managed and can be updated as usual.
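+ + Example (illustrative only; the node path below is hypothetical): another + node's string parm can reference the managed value with a channel + reference expression such as `chs("/obj/assetMain_filepath/filepath")`.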
+ + """ + + label = "Load filepath to node" + order = 9 + icon = "link" + color = "white" + product_types = {"*"} + representations = ["*"] + + def load(self, context, name=None, namespace=None, data=None): + + # Get the root node + obj = hou.node("/obj") + + # Define node name + namespace = namespace if namespace else context["folder"]["name"] + node_name = "{}_{}".format(namespace, name) if namespace else name + + # Create a null node + container = obj.createNode("null", node_name=node_name) + + # Destroy any children + for node in container.children(): + node.destroy() + + # Add filepath attribute, set value as default value + filepath = self.format_path( + path=self.filepath_from_context(context), + representation=context["representation"] + ) + parm_template_group = container.parmTemplateGroup() + attr_folder = hou.FolderParmTemplate("attributes_folder", "Attributes") + parm = hou.StringParmTemplate(name="filepath", + label="Filepath", + num_components=1, + default_value=(filepath,)) + attr_folder.addParmTemplate(parm) + parm_template_group.append(attr_folder) + + # Hide some default labels + for folder_label in ["Transform", "Render", "Misc", "Redshift OBJ"]: + folder = parm_template_group.findFolder(folder_label) + if not folder: + continue + parm_template_group.hideFolder(folder_label, True) + + container.setParmTemplateGroup(parm_template_group) + + container.setDisplayFlag(False) + container.setSelectableInViewport(False) + container.useXray(False) + + nodes = [container] + + self[:] = nodes + + return pipeline.containerise( + node_name, + namespace, + nodes, + context, + self.__class__.__name__, + suffix="", + ) + + def update(self, container, context): + + # Update the file path + representation_entity = context["representation"] + file_path = self.format_path( + path=self.filepath_from_context(context), + representation=representation_entity + ) + + node = container["node"] + node.setParms({ + "filepath": file_path, + "representation": str(representation_entity["id"]) + }) + + # Update the parameter default value (cosmetics) + parm_template_group = node.parmTemplateGroup() + parm = parm_template_group.find("filepath") + parm.setDefaultValue((file_path,)) + parm_template_group.replace(parm_template_group.find("filepath"), + parm) + node.setParmTemplateGroup(parm_template_group) + + def switch(self, container, context): + self.update(container, context) + + def remove(self, container): + + node = container["node"] + node.destroy() + + @staticmethod + def format_path(path: str, representation: dict) -> str: + """Format file path for sequence with $F.""" + if not os.path.exists(path): + raise RuntimeError("Path does not exist: %s" % path) + + # The path is either a single file or sequence in a folder. + frame = representation["context"].get("frame") + if frame is not None: + # Substitute frame number in sequence with $F with padding + ext = representation.get("ext", representation["name"]) + token = "$F{}".format(len(frame)) # e.g. 
$F4 + pattern = r"\.(\d+)\.{ext}$".format(ext=re.escape(ext)) + path = re.sub(pattern, ".{}.{}".format(token, ext), path) + + return os.path.normpath(path).replace("\\", "/") diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_usd_sop.py b/client/ayon_core/hosts/houdini/plugins/load/load_usd_sop.py new file mode 100644 index 0000000000..5b7e022e73 --- /dev/null +++ b/client/ayon_core/hosts/houdini/plugins/load/load_usd_sop.py @@ -0,0 +1,77 @@ +import os + +from ayon_core.pipeline import load +from ayon_core.hosts.houdini.api import pipeline + + +class SopUsdImportLoader(load.LoaderPlugin): + """Load USD to SOPs via `usdimport`""" + + label = "Load USD to SOPs" + product_types = {"*"} + representations = ["usd"] + order = -6 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + import hou + + # Format file name, Houdini only wants forward slashes + file_path = self.filepath_from_context(context) + file_path = os.path.normpath(file_path) + file_path = file_path.replace("\\", "/") + + # Get the root node + obj = hou.node("/obj") + + # Define node name + namespace = namespace if namespace else context["folder"]["name"] + node_name = "{}_{}".format(namespace, name) if namespace else name + + # Create a new geo node + container = obj.createNode("geo", node_name=node_name) + + # Create a usdimport node + usdimport = container.createNode("usdimport", node_name=node_name) + usdimport.setParms({"filepath1": file_path}) + + # Set new position for unpack node else it gets cluttered + nodes = [container, usdimport] + + return pipeline.containerise( + node_name, + namespace, + nodes, + context, + self.__class__.__name__, + suffix="", + ) + + def update(self, container, context): + + node = container["node"] + try: + usdimport_node = next( + n for n in node.children() if n.type().name() == "usdimport" + ) + except StopIteration: + self.log.error("Could not find node of type `usdimport`") + return + + # Update the file path + file_path = self.filepath_from_context(context) + file_path = file_path.replace("\\", "/") + + usdimport_node.setParms({"filepath1": file_path}) + + # Update attribute + node.setParms({"representation": context["representation"]["id"]}) + + def remove(self, container): + + node = container["node"] + node.destroy() + + def switch(self, container, representation): + self.update(container, representation) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_karma_rop.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_karma_rop.py index 85100bc2c6..78651b0c69 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_karma_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_karma_rop.py @@ -41,23 +41,23 @@ class CollectKarmaROPRenderProducts(pyblish.api.InstancePlugin): instance.data["chunkSize"] = chunk_size self.log.debug("Chunk Size: %s" % chunk_size) - default_prefix = evalParmNoFrame(rop, "picture") - render_products = [] + default_prefix = evalParmNoFrame(rop, "picture") + render_products = [] - # Default beauty AOV - beauty_product = self.get_render_product_name( - prefix=default_prefix, suffix=None - ) - render_products.append(beauty_product) + # Default beauty AOV + beauty_product = self.get_render_product_name( + prefix=default_prefix, suffix=None + ) + render_products.append(beauty_product) - files_by_aov = { - "beauty": self.generate_expected_files(instance, - beauty_product) - } + files_by_aov = { + "beauty": self.generate_expected_files(instance, + 
beauty_product) + } - filenames = list(render_products) - instance.data["files"] = filenames - instance.data["renderProducts"] = colorspace.ARenderProduct() + filenames = list(render_products) + instance.data["files"] = filenames + instance.data["renderProducts"] = colorspace.ARenderProduct() for product in render_products: self.log.debug("Found render product: %s" % product) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_mantra_rop.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_mantra_rop.py index d46476c2ce..df9acc4b61 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_mantra_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_mantra_rop.py @@ -41,57 +41,57 @@ class CollectMantraROPRenderProducts(pyblish.api.InstancePlugin): instance.data["chunkSize"] = chunk_size self.log.debug("Chunk Size: %s" % chunk_size) - default_prefix = evalParmNoFrame(rop, "vm_picture") - render_products = [] + default_prefix = evalParmNoFrame(rop, "vm_picture") + render_products = [] - # Store whether we are splitting the render job (export + render) - split_render = bool(rop.parm("soho_outputmode").eval()) - instance.data["splitRender"] = split_render - export_prefix = None - export_products = [] - if split_render: - export_prefix = evalParmNoFrame( - rop, "soho_diskfile", pad_character="0" - ) - beauty_export_product = self.get_render_product_name( - prefix=export_prefix, - suffix=None) - export_products.append(beauty_export_product) - self.log.debug( - "Found export product: {}".format(beauty_export_product) - ) - instance.data["ifdFile"] = beauty_export_product - instance.data["exportFiles"] = list(export_products) - - # Default beauty AOV - beauty_product = self.get_render_product_name( - prefix=default_prefix, suffix=None + # Store whether we are splitting the render job (export + render) + split_render = bool(rop.parm("soho_outputmode").eval()) + instance.data["splitRender"] = split_render + export_prefix = None + export_products = [] + if split_render: + export_prefix = evalParmNoFrame( + rop, "soho_diskfile", pad_character="0" ) - render_products.append(beauty_product) + beauty_export_product = self.get_render_product_name( + prefix=export_prefix, + suffix=None) + export_products.append(beauty_export_product) + self.log.debug( + "Found export product: {}".format(beauty_export_product) + ) + instance.data["ifdFile"] = beauty_export_product + instance.data["exportFiles"] = list(export_products) - files_by_aov = { - "beauty": self.generate_expected_files(instance, - beauty_product) - } + # Default beauty AOV + beauty_product = self.get_render_product_name( + prefix=default_prefix, suffix=None + ) + render_products.append(beauty_product) - aov_numbers = rop.evalParm("vm_numaux") - if aov_numbers > 0: - # get the filenames of the AOVs - for i in range(1, aov_numbers + 1): - var = rop.evalParm("vm_variable_plane%d" % i) - if var: - aov_name = "vm_filename_plane%d" % i - aov_boolean = "vm_usefile_plane%d" % i - aov_enabled = rop.evalParm(aov_boolean) - has_aov_path = rop.evalParm(aov_name) - if has_aov_path and aov_enabled == 1: - aov_prefix = evalParmNoFrame(rop, aov_name) - aov_product = self.get_render_product_name( - prefix=aov_prefix, suffix=None - ) - render_products.append(aov_product) + files_by_aov = { + "beauty": self.generate_expected_files(instance, + beauty_product) + } - files_by_aov[var] = self.generate_expected_files(instance, aov_product) # noqa + aov_numbers = rop.evalParm("vm_numaux") + if aov_numbers > 0: + # get the 
filenames of the AOVs + for i in range(1, aov_numbers + 1): + var = rop.evalParm("vm_variable_plane%d" % i) + if var: + aov_name = "vm_filename_plane%d" % i + aov_boolean = "vm_usefile_plane%d" % i + aov_enabled = rop.evalParm(aov_boolean) + has_aov_path = rop.evalParm(aov_name) + if has_aov_path and aov_enabled == 1: + aov_prefix = evalParmNoFrame(rop, aov_name) + aov_product = self.get_render_product_name( + prefix=aov_prefix, suffix=None + ) + render_products.append(aov_product) + + files_by_aov[var] = self.generate_expected_files(instance, aov_product) # noqa for product in render_products: self.log.debug("Found render product: %s" % product) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_redshift_rop.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_redshift_rop.py index 8437757c58..55a55bb12a 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_redshift_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -60,15 +60,22 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): instance.data["ifdFile"] = beauty_export_product instance.data["exportFiles"] = list(export_products) - # Default beauty AOV + full_exr_mode = (rop.evalParm("RS_outputMultilayerMode") == "2") + if full_exr_mode: + # Ignore beauty suffix if full mode is enabled + # As this is what the rop does. + beauty_suffix = "" + + # Default beauty/main layer AOV beauty_product = self.get_render_product_name( prefix=default_prefix, suffix=beauty_suffix ) render_products = [beauty_product] files_by_aov = { - "_": self.generate_expected_files(instance, - beauty_product)} - + beauty_suffix: self.generate_expected_files(instance, + beauty_product) + } + aovs_rop = rop.parm("RS_aovGetFromNode").evalAsNode() if aovs_rop: rop = aovs_rop @@ -89,11 +96,14 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): if not aov_prefix: aov_prefix = default_prefix - aov_product = self.get_render_product_name(aov_prefix, aov_suffix) - render_products.append(aov_product) + if rop.parm(f"RS_aovID_{i}").evalAsString() == "CRYPTOMATTE" or \ + not full_exr_mode: + + aov_product = self.get_render_product_name(aov_prefix, aov_suffix) + render_products.append(aov_product) - files_by_aov[aov_suffix] = self.generate_expected_files(instance, - aov_product) # noqa + files_by_aov[aov_suffix] = self.generate_expected_files(instance, + aov_product) # noqa for product in render_products: self.log.debug("Found render product: %s" % product) @@ -121,7 +131,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): # When AOV is explicitly defined in prefix we just swap it out # directly with the AOV suffix to embed it. 
- # Note: ${AOV} seems to be evaluated in the parameter as %AOV% + # Note: '$AOV' seems to be evaluated in the parameter as '%AOV%' has_aov_in_prefix = "%AOV%" in prefix if has_aov_in_prefix: # It seems that when some special separator characters are present diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py index 95414ae7f1..fdf03d5cba 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -71,6 +71,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): # the isinstance check above should be stricter than this category if output_node.type().category().name() != "Cop2": raise PublishValidationError( - ("Output node %s is not of category Cop2. " - "This is a bug...").format(output_node.path()), + ( + "Output node {} is not of category Cop2." + " This is a bug..." + ).format(output_node.path()), title=cls.label) diff --git a/client/ayon_core/hosts/max/api/lib.py b/client/ayon_core/hosts/max/api/lib.py index 5f13856c9b..48bb15f538 100644 --- a/client/ayon_core/hosts/max/api/lib.py +++ b/client/ayon_core/hosts/max/api/lib.py @@ -11,7 +11,7 @@ import ayon_api from ayon_core.pipeline import get_current_project_name, colorspace from ayon_core.settings import get_project_settings from ayon_core.pipeline.context_tools import ( - get_current_project_folder, + get_current_folder_entity, ) from ayon_core.style import load_stylesheet from pymxs import runtime as rt @@ -222,7 +222,7 @@ def reset_scene_resolution(): contains any information regarding scene resolution. """ - folder_entity = get_current_project_folder( + folder_entity = get_current_folder_entity( fields={"attrib.resolutionWidth", "attrib.resolutionHeight"} ) folder_attributes = folder_entity["attrib"] @@ -243,7 +243,7 @@ def get_frame_range(folder_entiy=None) -> Union[Dict[str, Any], None]: """ # Set frame start/end if folder_entiy is None: - folder_entiy = get_current_project_folder() + folder_entiy = get_current_folder_entity() folder_attributes = folder_entiy["attrib"] frame_start = folder_attributes.get("frameStart") diff --git a/client/ayon_core/hosts/max/api/lib_rendersettings.py b/client/ayon_core/hosts/max/api/lib_rendersettings.py index 8a9881f032..35b6d064c1 100644 --- a/client/ayon_core/hosts/max/api/lib_rendersettings.py +++ b/client/ayon_core/hosts/max/api/lib_rendersettings.py @@ -3,7 +3,7 @@ from pymxs import runtime as rt from ayon_core.lib import Logger from ayon_core.settings import get_project_settings from ayon_core.pipeline import get_current_project_name -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.hosts.max.api.lib import ( set_render_frame_range, @@ -57,7 +57,7 @@ class RenderSettings(object): if not os.path.exists(output_dir): os.makedirs(output_dir) # hard-coded, should be customized in the setting - folder_attributes = get_current_project_folder()["attrib"] + folder_attributes = get_current_folder_entity()["attrib"] # get project resolution width = folder_attributes.get("resolutionWidth") diff --git a/client/ayon_core/hosts/max/api/pipeline.py b/client/ayon_core/hosts/max/api/pipeline.py index 4b1dcc25d3..675f36c24f 100644 --- a/client/ayon_core/hosts/max/api/pipeline.py +++ b/client/ayon_core/hosts/max/api/pipeline.py @@ -240,10 +240,10 @@ def 
get_previous_loaded_object(container: str): node_list(list): list of nodes which are previously loaded """ node_list = [] - sel_list = rt.getProperty(container.modifiers[0].openPypeData, "sel_list") - for obj in rt.Objects: - if str(obj) in sel_list: - node_list.append(obj) + node_transform_monitor_list = rt.getProperty( + container.modifiers[0].openPypeData, "all_handles") + for node_transform_monitor in node_transform_monitor_list: + node_list.append(node_transform_monitor.node) return node_list diff --git a/client/ayon_core/hosts/max/hooks/force_startup_script.py b/client/ayon_core/hosts/max/hooks/force_startup_script.py index 8ccd658e8f..417f0049ab 100644 --- a/client/ayon_core/hosts/max/hooks/force_startup_script.py +++ b/client/ayon_core/hosts/max/hooks/force_startup_script.py @@ -2,7 +2,7 @@ """Pre-launch to force 3ds max startup script.""" import os from ayon_core.hosts.max import MAX_HOST_DIR -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class ForceStartupScript(PreLaunchHook): diff --git a/client/ayon_core/hosts/max/hooks/inject_python.py b/client/ayon_core/hosts/max/hooks/inject_python.py index b1b36e75bd..fc9626ab87 100644 --- a/client/ayon_core/hosts/max/hooks/inject_python.py +++ b/client/ayon_core/hosts/max/hooks/inject_python.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Pre-launch hook to inject python environment.""" import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class InjectPythonPath(PreLaunchHook): diff --git a/client/ayon_core/hosts/max/hooks/set_paths.py b/client/ayon_core/hosts/max/hooks/set_paths.py index 0ee1b0dab7..f066de092e 100644 --- a/client/ayon_core/hosts/max/hooks/set_paths.py +++ b/client/ayon_core/hosts/max/hooks/set_paths.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class SetPath(PreLaunchHook): diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_obj.py b/client/ayon_core/hosts/max/plugins/load/load_model_obj.py index 4f8a22af07..2330dbfc24 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_obj.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_obj.py @@ -7,7 +7,6 @@ from ayon_core.hosts.max.api.lib import ( maintained_selection, object_transform_set ) -from ayon_core.hosts.max.api.lib import maintained_selection from ayon_core.hosts.max.api.pipeline import ( containerise, get_previous_loaded_object, diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_camera_contents.py b/client/ayon_core/hosts/max/plugins/publish/validate_camera_contents.py index 0473fd4a8a..334e7dcec9 100644 --- a/client/ayon_core/hosts/max/plugins/publish/validate_camera_contents.py +++ b/client/ayon_core/hosts/max/plugins/publish/validate_camera_contents.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import pyblish.api + from ayon_core.pipeline import PublishValidationError -from pymxs import runtime as rt class ValidateCameraContent(pyblish.api.InstancePlugin): diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_renderpasses.py b/client/ayon_core/hosts/max/plugins/publish/validate_renderpasses.py index ba948747b9..394d3119c4 100644 --- a/client/ayon_core/hosts/max/plugins/publish/validate_renderpasses.py +++ b/client/ayon_core/hosts/max/plugins/publish/validate_renderpasses.py @@ -140,7 +140,7 @@ class ValidateRenderPasses(OptionalPyblishPluginMixin, invalid = [] if 
instance.name not in file_name: cls.log.error("The renderpass filename should contain the instance name.") - invalid.append((f"Invalid instance name", + invalid.append(("Invalid instance name", file_name)) if renderpass is not None: if not file_name.rstrip(".").endswith(renderpass): diff --git a/client/ayon_core/hosts/maya/api/action.py b/client/ayon_core/hosts/maya/api/action.py index baf558036e..d845ac6066 100644 --- a/client/ayon_core/hosts/maya/api/action.py +++ b/client/ayon_core/hosts/maya/api/action.py @@ -4,7 +4,10 @@ from __future__ import absolute_import import pyblish.api import ayon_api -from ayon_core.pipeline.publish import get_errored_instances_from_context +from ayon_core.pipeline.publish import ( + get_errored_instances_from_context, + get_errored_plugins_from_context +) class GenerateUUIDsOnInvalidAction(pyblish.api.Action): @@ -112,20 +115,25 @@ class SelectInvalidAction(pyblish.api.Action): except ImportError: raise ImportError("Current host is not Maya") - errored_instances = get_errored_instances_from_context(context, - plugin=plugin) - # Get the invalid nodes for the plug-ins self.log.info("Finding invalid nodes..") invalid = list() - for instance in errored_instances: - invalid_nodes = plugin.get_invalid(instance) - if invalid_nodes: - if isinstance(invalid_nodes, (list, tuple)): - invalid.extend(invalid_nodes) - else: - self.log.warning("Plug-in returned to be invalid, " - "but has no selectable nodes.") + if issubclass(plugin, pyblish.api.ContextPlugin): + errored_plugins = get_errored_plugins_from_context(context) + if plugin in errored_plugins: + invalid = plugin.get_invalid(context) + else: + errored_instances = get_errored_instances_from_context( + context, plugin=plugin + ) + for instance in errored_instances: + invalid_nodes = plugin.get_invalid(instance) + if invalid_nodes: + if isinstance(invalid_nodes, (list, tuple)): + invalid.extend(invalid_nodes) + else: + self.log.warning("Plug-in returned to be invalid, " + "but has no selectable nodes.") # Ensure unique (process each node only once) invalid = list(set(invalid)) diff --git a/client/ayon_core/hosts/maya/api/customize.py b/client/ayon_core/hosts/maya/api/customize.py index 4db8819ff5..16255f69ba 100644 --- a/client/ayon_core/hosts/maya/api/customize.py +++ b/client/ayon_core/hosts/maya/api/customize.py @@ -113,7 +113,9 @@ def override_toolbox_ui(): annotation="Look Manager", label="Look Manager", image=os.path.join(icons, "lookmanager.png"), - command=show_look_assigner, + command=lambda: show_look_assigner( + parent=parent_widget + ), width=icon_size, height=icon_size, parent=parent diff --git a/client/ayon_core/hosts/maya/api/lib.py b/client/ayon_core/hosts/maya/api/lib.py index 8ca898f621..060c16056c 100644 --- a/client/ayon_core/hosts/maya/api/lib.py +++ b/client/ayon_core/hosts/maya/api/lib.py @@ -37,7 +37,7 @@ from ayon_core.pipeline import ( AYON_CONTAINER_ID, ) from ayon_core.lib import NumberDef -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.pipeline.create import CreateContext from ayon_core.lib.profiles_filtering import filter_profiles @@ -131,7 +131,7 @@ def get_main_window(): def suspended_refresh(suspend=True): """Suspend viewport refreshes - cmds.ogs(pause=True) is a toggle so we cant pass False. + cmds.ogs(pause=True) is a toggle so we can't pass False. 
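+ + Example (illustrative; assumes the context manager decorator above + this hunk, which is not shown here): + + >>> with suspended_refresh(): + ...     do_heavy_scene_edits()  # hypothetical helper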
""" if IS_HEADLESS: yield @@ -583,7 +583,7 @@ def pairwise(iterable): def collect_animation_defs(fps=False): - """Get the basic animation attribute defintions for the publisher. + """Get the basic animation attribute definitions for the publisher. Returns: OrderedDict @@ -1876,18 +1876,9 @@ def list_looks(project_name, folder_id): list[dict[str, Any]]: List of look products. """ - # # get all products with look leading in - # the name associated with the asset - # TODO this should probably look for product type 'look' instead of - # checking product name that can not start with product type - product_entities = ayon_api.get_products( - project_name, folder_ids=[folder_id] - ) - return [ - product_entity - for product_entity in product_entities - if product_entity["name"].startswith("look") - ] + return list(ayon_api.get_products( + project_name, folder_ids=[folder_id], product_types={"look"} + )) def assign_look_by_version(nodes, version_id): @@ -1906,12 +1897,15 @@ def assign_look_by_version(nodes, version_id): project_name = get_current_project_name() # Get representations of shader file and relationships - look_representation = ayon_api.get_representation_by_name( - project_name, "ma", version_id - ) - json_representation = ayon_api.get_representation_by_name( - project_name, "json", version_id - ) + representations = list(ayon_api.get_representations( + project_name=project_name, + representation_names={"ma", "json"}, + version_ids=[version_id] + )) + look_representation = next( + repre for repre in representations if repre["name"] == "ma") + json_representation = next( + repre for repre in representations if repre["name"] == "json") # See if representation is already loaded, if so reuse it. host = registered_host() @@ -1948,7 +1942,7 @@ def assign_look_by_version(nodes, version_id): apply_shaders(relationships, shader_nodes, nodes) -def assign_look(nodes, product_name="lookDefault"): +def assign_look(nodes, product_name="lookMain"): """Assigns a look to a node. 
Optimizes the nodes by grouping by folder id and finding @@ -1981,14 +1975,10 @@ def assign_look(nodes, product_name="lookDefault"): product_entity["id"] for product_entity in product_entities_by_folder_id.values() } - last_version_entities = ayon_api.get_last_versions( + last_version_entities_by_product_id = ayon_api.get_last_versions( project_name, product_ids ) - last_version_entities_by_product_id = { - last_version_entity["productId"]: last_version_entity - for last_version_entity in last_version_entities - } for folder_id, asset_nodes in grouped.items(): product_entity = product_entities_by_folder_id.get(folder_id) @@ -2125,22 +2115,6 @@ def get_related_sets(node): """ - # Ignore specific suffices - ignore_suffices = ["out_SET", "controls_SET", "_INST", "_CON"] - - # Default nodes to ignore - defaults = {"defaultLightSet", "defaultObjectSet"} - - # Ids to ignore - ignored = { - AVALON_INSTANCE_ID, - AVALON_CONTAINER_ID, - AYON_INSTANCE_ID, - AYON_CONTAINER_ID, - } - - view_sets = get_isolate_view_sets() - sets = cmds.listSets(object=node, extendToShape=False) if not sets: return [] @@ -2151,25 +2125,47 @@ def get_related_sets(node): # returned by `cmds.listSets(allSets=True)` sets = cmds.ls(sets) + # Ids to ignore + ignored = { + AVALON_INSTANCE_ID, + AVALON_CONTAINER_ID, + AYON_INSTANCE_ID, + AYON_CONTAINER_ID, + } + # Ignore `avalon.container` - sets = [s for s in sets if - not cmds.attributeQuery("id", node=s, exists=True) or - not cmds.getAttr("%s.id" % s) in ignored] + sets = [ + s for s in sets + if ( + not cmds.attributeQuery("id", node=s, exists=True) + or cmds.getAttr(f"{s}.id") not in ignored + ) + ] + if not sets: + return sets # Exclude deformer sets (`type=2` for `maya.cmds.listSets`) - deformer_sets = cmds.listSets(object=node, - extendToShape=False, - type=2) or [] - deformer_sets = set(deformer_sets) # optimize lookup - sets = [s for s in sets if s not in deformer_sets] + exclude_sets = cmds.listSets(object=node, + extendToShape=False, + type=2) or [] + exclude_sets = set(exclude_sets) # optimize lookup + + # Default nodes to ignore + exclude_sets.update({"defaultLightSet", "defaultObjectSet"}) + + # Filter out the sets to exclude + sets = [s for s in sets if s not in exclude_sets] # Ignore when the set has a specific suffix - sets = [s for s in sets if not any(s.endswith(x) for x in ignore_suffices)] + ignore_suffices = ("out_SET", "controls_SET", "_INST", "_CON") + sets = [s for s in sets if not s.endswith(ignore_suffices)] + if not sets: + return sets # Ignore viewport filter view sets (from isolate select and # viewports) + view_sets = get_isolate_view_sets() sets = [s for s in sets if s not in view_sets] - sets = [s for s in sets if s not in defaults] return sets @@ -2440,12 +2436,10 @@ def set_scene_fps(fps, update=True): cmds.currentUnit(time=unit, updateAnimation=update) # Set time slider data back to previous state - cmds.playbackOptions(edit=True, minTime=start_frame) - cmds.playbackOptions(edit=True, maxTime=end_frame) - - # Set animation data - cmds.playbackOptions(edit=True, animationStartTime=animation_start) - cmds.playbackOptions(edit=True, animationEndTime=animation_end) + cmds.playbackOptions(minTime=start_frame, + maxTime=end_frame, + animationStartTime=animation_start, + animationEndTime=animation_end) cmds.currentTime(current_frame, edit=True, update=True) @@ -2521,7 +2515,7 @@ def get_fps_for_current_context(): def get_frame_range(include_animation_range=False): - """Get the current folder frame range and handles. 
+ """Get the current task frame range and handles. Args: include_animation_range (bool, optional): Whether to include @@ -2529,25 +2523,34 @@ def get_frame_range(include_animation_range=False): range of the timeline. It is excluded by default. Returns: - dict: Folder's expected frame range values. + dict: Task's expected frame range values. """ # Set frame start/end project_name = get_current_project_name() folder_path = get_current_folder_path() - folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) - folder_attributes = folder_entity["attrib"] + task_name = get_current_task_name() - frame_start = folder_attributes.get("frameStart") - frame_end = folder_attributes.get("frameEnd") + folder_entity = ayon_api.get_folder_by_path( + project_name, + folder_path, + fields={"id"}) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + + task_attributes = task_entity["attrib"] + + frame_start = task_attributes.get("frameStart") + frame_end = task_attributes.get("frameEnd") if frame_start is None or frame_end is None: cmds.warning("No edit information found for '{}'".format(folder_path)) return - handle_start = folder_attributes.get("handleStart") or 0 - handle_end = folder_attributes.get("handleEnd") or 0 + handle_start = task_attributes.get("handleStart") or 0 + handle_end = task_attributes.get("handleEnd") or 0 frame_range = { "frameStart": frame_start, @@ -2561,14 +2564,10 @@ def get_frame_range(include_animation_range=False): # Some usages of this function use the full dictionary to define # instance attributes for which we want to exclude the animation # keys. That is why these are excluded by default. - task_name = get_current_task_name() + settings = get_project_settings(project_name) - task_entity = ayon_api.get_task_by_name( - project_name, folder_entity["id"], task_name - ) - task_type = None - if task_entity: - task_type = task_entity["taskType"] + + task_type = task_entity["taskType"] include_handles_settings = settings["maya"]["include_handles"] @@ -2637,7 +2636,7 @@ def reset_scene_resolution(): None """ - folder_attributes = get_current_project_folder()["attrib"] + folder_attributes = get_current_folder_entity()["attrib"] # Set resolution width = folder_attributes.get("resolutionWidth", 1920) @@ -2647,31 +2646,114 @@ def reset_scene_resolution(): set_scene_resolution(width, height, pixelAspect) -def set_context_settings(): +def set_context_settings( + fps=True, + resolution=True, + frame_range=True, + colorspace=True +): """Apply the project settings from the project definition - Settings can be overwritten by an folder if the folder.attrib contains + Settings can be overwritten by an asset if the asset.data contains any information regarding those settings. - Examples of settings: - fps - resolution - renderer + Args: + fps (bool): Whether to set the scene FPS. + resolution (bool): Whether to set the render resolution. + frame_range (bool): Whether to reset the time slide frame ranges. + colorspace (bool): Whether to reset the colorspace. Returns: None """ - # Set project fps - set_scene_fps(get_fps_for_current_context()) + if fps: + # Set project fps + set_scene_fps(get_fps_for_current_context()) - reset_scene_resolution() + if resolution: + reset_scene_resolution() # Set frame range. - reset_frame_range() + if frame_range: + reset_frame_range(fps=False) # Set colorspace - set_colorspace() + if colorspace: + set_colorspace() + + +def prompt_reset_context(): + """Prompt the user what context settings to reset. 
+ This prompt is used on saving to a different task to allow the scene to + get matched to the new context. + """ + # TODO: Cleanup this prototyped mess of imports and odd dialog + from ayon_core.tools.attribute_defs.dialog import ( + AttributeDefinitionsDialog + ) + from ayon_core.style import load_stylesheet + from ayon_core.lib import BoolDef, UILabelDef + + definitions = [ + UILabelDef( + label=( + "You are saving your workfile into a different folder or task." + "\n\n" + "Would you like to update some settings to the new context?\n" + ) + ), + BoolDef( + "fps", + label="FPS", + tooltip="Reset workfile FPS", + default=True + ), + BoolDef( + "frame_range", + label="Frame Range", + tooltip="Reset workfile start and end frame ranges", + default=True + ), + BoolDef( + "resolution", + label="Resolution", + tooltip="Reset workfile resolution", + default=True + ), + BoolDef( + "colorspace", + label="Colorspace", + tooltip="Reset workfile resolution", + default=True + ), + BoolDef( + "instances", + label="Publish instances", + tooltip="Update all publish instance's folder and task to match " + "the new folder and task", + default=True + ), + ] + + dialog = AttributeDefinitionsDialog(definitions) + dialog.setWindowTitle("Saving to different context.") + dialog.setStyleSheet(load_stylesheet()) + if not dialog.exec_(): + return None + + options = dialog.get_values() + with suspended_refresh(): + set_context_settings( + fps=options["fps"], + resolution=options["resolution"], + frame_range=options["frame_range"], + colorspace=options["colorspace"] + ) + if options["instances"]: + update_content_on_context_change() + + dialog.deleteLater() # Valid FPS @@ -3163,7 +3245,7 @@ def update_content_on_context_change(): This will update scene content to match new folder on context change """ scene_sets = cmds.listSets(allSets=True) - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() folder_attributes = folder_entity["attrib"] new_folder_path = folder_entity["path"] for s in scene_sets: @@ -3834,7 +3916,7 @@ def get_color_management_output_transform(): def image_info(file_path): # type: (str) -> dict - """Based on tha texture path, get its bit depth and format information. + """Based on the texture path, get its bit depth and format information. Take reference from makeTx.py in Arnold: ImageInfo(filename): Get Image Information for colorspace AiTextureGetFormat(filename): Get Texture Format @@ -3922,17 +4004,26 @@ def len_flattened(components): return n -def get_all_children(nodes): +def get_all_children(nodes, ignore_intermediate_objects=False): """Return all children of `nodes` including each instanced child. Using maya.cmds.listRelatives(allDescendents=True) includes only the first instance. As such, this function acts as an optimal replacement with a focus on a fast query. + Args: + nodes (iterable): List of nodes to get children for. + ignore_intermediate_objects (bool): Ignore any children that + are intermediate objects. + + Returns: + set: Children of input nodes. 
+ """ sel = OpenMaya.MSelectionList() traversed = set() iterator = OpenMaya.MItDag(OpenMaya.MItDag.kDepthFirst) + fn_dag = OpenMaya.MFnDagNode() for node in nodes: if node in traversed: @@ -3949,6 +4040,13 @@ def get_all_children(nodes): iterator.next() # noqa: B305 while not iterator.isDone(): + if ignore_intermediate_objects: + fn_dag.setObject(iterator.currentItem()) + if fn_dag.isIntermediateObject: + iterator.prune() + iterator.next() # noqa: B305 + continue + path = iterator.fullPathName() if path in traversed: @@ -3959,7 +4057,7 @@ def get_all_children(nodes): traversed.add(path) iterator.next() # noqa: B305 - return list(traversed) + return traversed def get_capture_preset( diff --git a/client/ayon_core/hosts/maya/api/lib_rendersettings.py b/client/ayon_core/hosts/maya/api/lib_rendersettings.py index 905e8c69af..f9e243146a 100644 --- a/client/ayon_core/hosts/maya/api/lib_rendersettings.py +++ b/client/ayon_core/hosts/maya/api/lib_rendersettings.py @@ -7,7 +7,7 @@ from ayon_core.lib import Logger from ayon_core.settings import get_project_settings from ayon_core.pipeline import CreatorError, get_current_project_name -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.hosts.maya.api.lib import reset_frame_range @@ -77,7 +77,7 @@ class RenderSettings(object): renderer = cmds.getAttr( 'defaultRenderGlobals.currentRenderer').lower() - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() folder_attributes = folder_entity["attrib"] # project_settings/maya/create/CreateRender/aov_separator try: diff --git a/client/ayon_core/hosts/maya/api/pipeline.py b/client/ayon_core/hosts/maya/api/pipeline.py index b3e401b91e..8e6e2ccd8a 100644 --- a/client/ayon_core/hosts/maya/api/pipeline.py +++ b/client/ayon_core/hosts/maya/api/pipeline.py @@ -67,6 +67,9 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") AVALON_CONTAINERS = ":AVALON_CONTAINERS" +# Track whether the workfile tool is about to save +_about_to_save = False + class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): name = "maya" @@ -581,6 +584,10 @@ def on_save(): for node, new_id in lib.generate_ids(nodes): lib.set_id(node, new_id, overwrite=False) + # We are now starting the actual save directly + global _about_to_save + _about_to_save = False + def on_open(): """On scene open let's assume the containers have changed.""" @@ -650,6 +657,11 @@ def on_task_changed(): lib.set_context_settings() lib.update_content_on_context_change() + global _about_to_save + if not lib.IS_HEADLESS and _about_to_save: + # Let's prompt the user to update the context settings or not + lib.prompt_reset_context() + def before_workfile_open(): if handle_workfile_locks(): @@ -664,6 +676,9 @@ def before_workfile_save(event): if workdir_path: create_workspace_mel(workdir_path, project_name) + global _about_to_save + _about_to_save = True + def workfile_save_before_xgen(event): """Manage Xgen external files when switching context. 
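The `_about_to_save` flag added to `pipeline.py` above is shared state spread across three separate callbacks, which is easy to misread in diff form. Below is a minimal, self-contained sketch of that pattern; the event wiring is stripped out and `prompt_reset_context` is a placeholder, so treat it as an illustration rather than the actual host integration.

```python
# Minimal sketch of the "about to save" tracking added to pipeline.py.
# Event registration and the dialog are intentionally simplified; only the
# flag handling mirrors the change above.

IS_HEADLESS = False
_about_to_save = False


def before_workfile_save(event):
    """Workfile tool is about to save; remember it until the save happens."""
    global _about_to_save
    _about_to_save = True


def on_save():
    """The save is actually running, so the pending state is consumed."""
    global _about_to_save
    _about_to_save = False


def on_task_changed():
    """Only prompt for a context reset when a workfile save caused the switch."""
    if not IS_HEADLESS and _about_to_save:
        prompt_reset_context()


def prompt_reset_context():
    # Placeholder for lib.prompt_reset_context(), which opens the attribute
    # definitions dialog added in lib.py.
    print("Prompt: reset FPS / frame range / resolution / colorspace?")
```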
diff --git a/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py b/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py index ed294da125..45785ac354 100644 --- a/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py +++ b/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class MayaPreAutoLoadPlugins(PreLaunchHook): diff --git a/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py b/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py index 3fd81ceff4..683b4c59c7 100644 --- a/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py +++ b/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.maya.lib import create_workspace_mel diff --git a/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py b/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py index 6bf678474f..a54f17c6c6 100644 --- a/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py +++ b/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class MayaPreOpenWorkfilePostInitialization(PreLaunchHook): diff --git a/client/ayon_core/hosts/maya/plugins/create/convert_legacy.py b/client/ayon_core/hosts/maya/plugins/create/convert_legacy.py index 685602ef0b..81cf9613b4 100644 --- a/client/ayon_core/hosts/maya/plugins/create/convert_legacy.py +++ b/client/ayon_core/hosts/maya/plugins/create/convert_legacy.py @@ -83,7 +83,7 @@ class MayaLegacyConvertor(ProductConvertorPlugin, ).format(product_type)) continue - creator_id = product_type_to_id[family] + creator_id = product_type_to_id[product_type] creator = self.create_context.creators[creator_id] data["creator_identifier"] = creator_id diff --git a/client/ayon_core/hosts/maya/plugins/create/create_unreal_skeletalmesh.py b/client/ayon_core/hosts/maya/plugins/create/create_unreal_skeletalmesh.py index 9ded28b812..a32e94971e 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_unreal_skeletalmesh.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_unreal_skeletalmesh.py @@ -20,13 +20,6 @@ class CreateUnrealSkeletalMesh(plugin.MayaCreator): # Defined in settings joint_hints = set() - def apply_settings(self, project_settings): - """Apply project settings to creator""" - settings = ( - project_settings["maya"]["create"]["CreateUnrealSkeletalMesh"] - ) - self.joint_hints = set(settings.get("joint_hints", [])) - def get_dynamic_data( self, project_name, diff --git a/client/ayon_core/hosts/maya/plugins/create/create_unreal_staticmesh.py b/client/ayon_core/hosts/maya/plugins/create/create_unreal_staticmesh.py index 1991f92915..76c33f00cc 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_unreal_staticmesh.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_unreal_staticmesh.py @@ -15,11 +15,6 @@ class CreateUnrealStaticMesh(plugin.MayaCreator): # Defined in settings collision_prefixes = [] - def apply_settings(self, project_settings): - """Apply project settings to creator""" - settings = project_settings["maya"]["create"]["CreateUnrealStaticMesh"] - self.collision_prefixes = settings["collision_prefixes"] - def get_dynamic_data( self, 
project_name, diff --git a/client/ayon_core/hosts/maya/plugins/create/create_unreal_yeticache.py b/client/ayon_core/hosts/maya/plugins/create/create_unreal_yeticache.py index 1eac8a5ea9..dea64b40fb 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_unreal_yeticache.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_unreal_yeticache.py @@ -5,7 +5,7 @@ from ayon_core.hosts.maya.api import ( from ayon_core.lib import NumberDef -class CreateYetiCache(plugin.MayaCreator): +class CreateUnrealYetiCache(plugin.MayaCreator): """Output for procedural plugin nodes of Yeti """ identifier = "io.openpype.creators.maya.unrealyeticache" diff --git a/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py b/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py index 920ad762b3..7170c30422 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py @@ -12,7 +12,6 @@ from ayon_core.hosts.maya.api.lib import ( unique_namespace, get_attribute_input, maintained_selection, - convert_to_maya_fps ) from ayon_core.hosts.maya.api.pipeline import containerise from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type diff --git a/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py b/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py index 494bc7cfc6..9689282ae9 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py @@ -1,5 +1,3 @@ -import os - import maya.cmds as cmds from ayon_core.hosts.maya.api.pipeline import containerise diff --git a/client/ayon_core/hosts/maya/plugins/load/load_image.py b/client/ayon_core/hosts/maya/plugins/load/load_image.py index 4976c46d7f..3641655d49 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_image.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_image.py @@ -1,10 +1,8 @@ -import os import copy from ayon_core.lib import EnumDef from ayon_core.pipeline import ( load, - get_representation_context, get_current_host_name, ) from ayon_core.pipeline.load.utils import get_representation_path_from_context diff --git a/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py b/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py index 7d6f7e26cf..b298d5b892 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py @@ -142,9 +142,21 @@ class ImagePlaneLoader(load.LoaderPlugin): with namespaced(namespace): # Create inside the namespace image_plane_transform, image_plane_shape = cmds.imagePlane( - fileName=context["representation"]["data"]["path"], + fileName=self.filepath_from_context(context), camera=camera ) + + # Set colorspace + colorspace = self.get_colorspace(context["representation"]) + if colorspace: + cmds.setAttr( + "{}.ignoreColorSpaceFileRules".format(image_plane_shape), + True + ) + cmds.setAttr("{}.colorSpace".format(image_plane_shape), + colorspace, type="string") + + # Set offset frame range start_frame = cmds.playbackOptions(query=True, min=True) end_frame = cmds.playbackOptions(query=True, max=True) @@ -216,6 +228,15 @@ class ImagePlaneLoader(load.LoaderPlugin): repre_entity["id"], type="string") + colorspace = self.get_colorspace(repre_entity) + if colorspace: + cmds.setAttr( + "{}.ignoreColorSpaceFileRules".format(image_plane_shape), + True + ) + cmds.setAttr("{}.colorSpace".format(image_plane_shape), + colorspace, type="string") + # 
Set frame range. start_frame = folder_entity["attrib"]["frameStart"] end_frame = folder_entity["attrib"]["frameEnd"] @@ -243,3 +264,12 @@ class ImagePlaneLoader(load.LoaderPlugin): deleteNamespaceContent=True) except RuntimeError: pass + + def get_colorspace(self, representation): + + data = representation.get("data", {}).get("colorspaceData", {}) + if not data: + return + + colorspace = data.get("colorspace") + return colorspace diff --git a/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py b/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py index 6f20e677f0..7096f86e35 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py @@ -9,7 +9,9 @@ instance. import json import sys import six +import contextlib +from ayon_core.lib import BoolDef, EnumDef from ayon_core.pipeline import ( load, get_representation_path @@ -21,6 +23,31 @@ from maya import cmds import maya.app.renderSetup.model.renderSetup as renderSetup +@contextlib.contextmanager +def mark_all_imported(enabled): + """Mark all imported nodes accepted by removing the `imported` attribute""" + if not enabled: + yield + return + + node_types = cmds.pluginInfo("renderSetup", query=True, dependNode=True) + + # Get node before load, then we can disable `imported` + # attribute on all new render setup layers after import + before = cmds.ls(type=node_types, long=True) + try: + yield + finally: + after = cmds.ls(type=node_types, long=True) + for node in (node for node in after if node not in before): + if cmds.attributeQuery("imported", + node=node, + exists=True): + plug = "{}.imported".format(node) + if cmds.getAttr(plug): + cmds.deleteAttr(plug) + + class RenderSetupLoader(load.LoaderPlugin): """Load json preset for RenderSetup overwriting current one.""" @@ -32,48 +59,79 @@ class RenderSetupLoader(load.LoaderPlugin): icon = "tablet" color = "orange" + options = [ + BoolDef("accept_import", + label="Accept import on load", + tooltip=( + "By default importing or pasting Render Setup collections " + "will display them italic in the Render Setup list.\nWith " + "this enabled the load will directly mark the import " + "'accepted' and remove the italic view." + ), + default=True), + BoolDef("load_managed", + label="Load Managed", + tooltip=( + "Containerize the rendersetup on load so it can be " + "'updated' later." 
+ ), + default=True), + EnumDef("import_mode", + label="Import mode", + items={ + renderSetup.DECODE_AND_OVERWRITE: ( + "Flush existing render setup and " + "add without any namespace" + ), + renderSetup.DECODE_AND_MERGE: ( + "Merge with the existing render setup objects and " + "rename the unexpected objects" + ), + renderSetup.DECODE_AND_RENAME: ( + "Renaming all decoded render setup objects to not " + "conflict with the existing render setup" + ), + }, + default=renderSetup.DECODE_AND_OVERWRITE) + ] + def load(self, context, name, namespace, data): """Load RenderSetup settings.""" - # from ayon_core.hosts.maya.api.lib import namespaced - - folder_name = context["folder"]["name"] - namespace = namespace or lib.unique_namespace( - folder_name + "_", - prefix="_" if folder_name[0].isdigit() else "", - suffix="_", - ) path = self.filepath_from_context(context) + + accept_import = data.get("accept_import", True) + import_mode = data.get("import_mode", renderSetup.DECODE_AND_OVERWRITE) + self.log.info(">>> loading json [ {} ]".format(path)) - with open(path, "r") as file: - renderSetup.instance().decode( - json.load(file), renderSetup.DECODE_AND_OVERWRITE, None) + with mark_all_imported(accept_import): + with open(path, "r") as file: + renderSetup.instance().decode( + json.load(file), import_mode, None) - nodes = [] - null = cmds.sets(name="null_SET", empty=True) - nodes.append(null) + if data.get("load_managed", True): + self.log.info(">>> containerising [ {} ]".format(name)) + folder_name = context["folder"]["name"] + namespace = namespace or lib.unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) - self[:] = nodes - if not nodes: - return - - self.log.info(">>> containerising [ {} ]".format(name)) - return containerise( - name=name, - namespace=namespace, - nodes=nodes, - context=context, - loader=self.__class__.__name__) + return containerise( + name=name, + namespace=namespace, + nodes=[], + context=context, + loader=self.__class__.__name__) def remove(self, container): """Remove RenderSetup settings instance.""" - from maya import cmds - container_name = container["objectName"] self.log.info("Removing '%s' from Maya.." 
% container["name"]) - container_content = cmds.sets(container_name, query=True) + container_content = cmds.sets(container_name, query=True) or [] nodes = cmds.ls(container_content, long=True) nodes.append(container_name) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py index 3d984fdc79..0780a7b3e7 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py @@ -61,7 +61,7 @@ class LoadVDBtoArnold(load.LoaderPlugin): path = self.filepath_from_context(context) self._set_path(grid_node, path=path, - representation=context["representation"]) + repre_entity=context["representation"]) # Lock the shape node so the user can't delete the transform/shape # as if it was referenced @@ -91,7 +91,7 @@ class LoadVDBtoArnold(load.LoaderPlugin): assert len(grid_nodes) == 1, "This is a bug" # Update the VRayVolumeGrid - self._set_path(grid_nodes[0], path=path, representation=repre_entity) + self._set_path(grid_nodes[0], path=path, repre_entity=repre_entity) # Update container representation cmds.setAttr(container["objectName"] + ".representation", diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py b/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py index 2d621353e6..0db89bee31 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py @@ -46,11 +46,18 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin): self.log.debug("data: {}".format(instance.data)) def get_hierarchy(self, nodes): - """Return nodes with all their children""" + """Return nodes with all their children. 
+ + Arguments: + nodes (List[str]): List of nodes to collect children hierarchy for + + Returns: + list: Input nodes with their children hierarchy + + """ nodes = cmds.ls(nodes, long=True) if not nodes: return [] - children = get_all_children(nodes) - # Make sure nodes merged with children only - # contains unique entries - return list(set(nodes + children)) + + children = get_all_children(nodes, ignore_intermediate_objects=True) + return list(children.union(nodes)) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py b/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py index 85be15bb7b..774c217cfd 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py @@ -48,15 +48,15 @@ class CollectNewInstances(pyblish.api.InstancePlugin): # Collect members members = cmds.ls(members, long=True) or [] + # Collect full hierarchy dag_members = cmds.ls(members, type="dagNode", long=True) - children = get_all_children(dag_members) - children = cmds.ls(children, noIntermediate=True, long=True) - parents = ( - self.get_all_parents(members) - if creator_attributes.get("includeParentHierarchy", True) - else [] - ) - members_hierarchy = list(set(members + children + parents)) + children = get_all_children(dag_members, + ignore_intermediate_objects=True) + + members_hierarchy = set(members) + members_hierarchy.update(children) + if creator_attributes.get("includeParentHierarchy", True): + members_hierarchy.update(self.get_all_parents(dag_members)) instance[:] = members_hierarchy @@ -97,16 +97,16 @@ class CollectNewInstances(pyblish.api.InstancePlugin): """Get all parents by using string operations (optimization) Args: - nodes (list): the nodes which are found in the objectSet + nodes (iterable): the nodes which are found in the objectSet Returns: - list + set """ - parents = [] + parents = set() for node in nodes: splitted = node.split("|") items = ["|".join(splitted[0:i]) for i in range(2, len(splitted))] - parents.extend(items) + parents.update(items) - return list(set(parents)) + return parents diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_render.py b/client/ayon_core/hosts/maya/plugins/publish/collect_render.py index 13eb8fd49e..21095935a2 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_render.py @@ -1,24 +1,19 @@ # -*- coding: utf-8 -*- """Collect render data. -This collector will go through render layers in maya and prepare all data -needed to create instances and their representations for submission and -publishing on farm. +This collector will go through renderlayer instances and prepare all data +needed to detect the expected rendered files for a layer, with resolution, +frame ranges and collects the data needed for publishing on the farm. Requires: instance -> families - instance -> setMembers - instance -> folderPath context -> currentFile - context -> workspaceDir context -> user -Optional: - Provides: instance -> label - instance -> productName + instance -> subset instance -> attachTo instance -> setMembers instance -> publish @@ -26,6 +21,8 @@ Provides: instance -> frameEnd instance -> byFrameStep instance -> renderer + instance -> family + instance -> asset instance -> time instance -> author instance -> source @@ -71,14 +68,11 @@ class CollectMayaRender(pyblish.api.InstancePlugin): # TODO: Re-add force enable of workfile instance? 
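The reworked `CollectNewInstances.get_all_parents` above derives parents purely from the `|`-separated full paths and returns a set instead of a list. A small standalone sketch of that string-based optimization (the example paths are made up, and no Maya session is needed to run it):

```python
def get_all_parents(nodes):
    """Return all parent DAG paths of the given full paths (string-based)."""
    parents = set()
    for node in nodes:
        parts = node.split("|")
        # "|grp|child|shape" -> "|grp", "|grp|child"
        parents.update("|".join(parts[0:i]) for i in range(2, len(parts)))
    return parents


# Hypothetical full paths, only to show the behaviour
print(sorted(get_all_parents(["|root|geo|body|bodyShape", "|root|rig|ctrl"])))
# ['|root', '|root|geo', '|root|geo|body', '|root|rig']
```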
# TODO: Re-add legacy layer support with LAYER_ prefix but in Creator - # TODO: Set and collect active state of RenderLayer in Creator using - # renderlayer.isRenderable() context = instance.context layer = instance.data["transientData"]["layer"] objset = instance.data.get("instance_node") filepath = context.data["currentFile"].replace("\\", "/") - workspace = context.data["workspaceDir"] # check if layer is renderable if not layer.isRenderable(): @@ -113,7 +107,13 @@ class CollectMayaRender(pyblish.api.InstancePlugin): except UnsupportedRendererException as exc: raise KnownPublishError(exc) render_products = layer_render_products.layer_data.products - assert render_products, "no render products generated" + if not render_products: + self.log.error( + "No render products generated for '%s'. You might not have " + "any render camera in the renderlayer or render end frame is " + "lower than start frame.", + instance.name + ) expected_files = [] multipart = False for product in render_products: @@ -131,16 +131,21 @@ class CollectMayaRender(pyblish.api.InstancePlugin): }) has_cameras = any(product.camera for product in render_products) - assert has_cameras, "No render cameras found." - - self.log.debug("multipart: {}".format( - multipart)) - assert expected_files, "no file names were generated, this is a bug" - self.log.debug( - "expected files: {}".format( - json.dumps(expected_files, indent=4, sort_keys=True) + if render_products and not has_cameras: + self.log.error( + "No render cameras found for: %s", + instance ) - ) + if not expected_files: + self.log.warning( + "No file names were generated, this is a bug.") + + for render_product in render_products: + self.log.debug(render_product) + self.log.debug("multipart: {}".format(multipart)) + self.log.debug("expected files: {}".format( + json.dumps(expected_files, indent=4, sort_keys=True) + )) # if we want to attach render to product, check if we have AOV's # in expectedFiles. If so, raise error as we cannot attach AOV @@ -152,14 +157,14 @@ class CollectMayaRender(pyblish.api.InstancePlugin): ) # append full path - aov_dict = {} image_directory = os.path.join( cmds.workspace(query=True, rootDirectory=True), cmds.workspace(fileRuleEntry="images") ) # replace relative paths with absolute. Render products are # returned as list of dictionaries. - publish_meta_path = None + publish_meta_path = "NOT-SET" + aov_dict = {} for aov in expected_files: full_paths = [] aov_first_key = list(aov.keys())[0] @@ -170,14 +175,6 @@ class CollectMayaRender(pyblish.api.InstancePlugin): publish_meta_path = os.path.dirname(full_path) aov_dict[aov_first_key] = full_paths full_exp_files = [aov_dict] - self.log.debug(full_exp_files) - - if publish_meta_path is None: - raise KnownPublishError("Unable to detect any expected output " - "images for: {}. Make sure you have a " - "renderable camera and a valid frame " - "range set for your renderlayer." 
- "".format(instance.name)) frame_start_render = int(self.get_render_attribute( "startFrame", layer=layer_name)) @@ -223,7 +220,8 @@ class CollectMayaRender(pyblish.api.InstancePlugin): common_publish_meta_path = "/" + common_publish_meta_path self.log.debug( - "Publish meta path: {}".format(common_publish_meta_path)) + "Publish meta path: {}".format(common_publish_meta_path) + ) # Get layer specific settings, might be overrides colorspace_data = lib.get_color_management_preferences() @@ -314,7 +312,7 @@ class CollectMayaRender(pyblish.api.InstancePlugin): if not extend_frames: instance.data["overrideExistingFrame"] = False - # Update the instace + # Update the instance instance.data.update(data) @staticmethod diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py b/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py index 19825b769c..4b293b5785 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py @@ -5,7 +5,8 @@ from maya import cmds from ayon_core.pipeline import publish -class ExtractGPUCache(publish.Extractor): +class ExtractGPUCache(publish.Extractor, + publish.OptionalPyblishPluginMixin): """Extract the content of the instance to a GPU cache file.""" label = "GPU Cache" @@ -20,6 +21,9 @@ class ExtractGPUCache(publish.Extractor): useBaseTessellation = True def process(self, instance): + if not self.is_active(instance.data): + return + cmds.loadPlugin("gpuCache", quiet=True) staging_dir = self.staging_dir(instance) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_pointcache.py b/client/ayon_core/hosts/maya/plugins/publish/extract_pointcache.py index f2187063fc..5de72f7674 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_pointcache.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_pointcache.py @@ -26,6 +26,10 @@ class ExtractAlembic(publish.Extractor): families = ["pointcache", "model", "vrayproxy.alembic"] targets = ["local", "remote"] + # From settings + bake_attributes = [] + bake_attribute_prefixes = [] + def process(self, instance): if instance.data.get("farm"): self.log.debug("Should be processed on farm, skipping.") @@ -40,10 +44,12 @@ class ExtractAlembic(publish.Extractor): attrs = instance.data.get("attr", "").split(";") attrs = [value for value in attrs if value.strip()] attrs += instance.data.get("userDefinedAttributes", []) + attrs += self.bake_attributes attrs += ["cbId"] attr_prefixes = instance.data.get("attrPrefix", "").split(";") attr_prefixes = [value for value in attr_prefixes if value.strip()] + attr_prefixes += self.bake_attribute_prefixes self.log.debug("Extracting pointcache..") dirname = self.staging_dir(instance) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py index 8b88bfb9f8..1a389f3d33 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Create Unreal Skeletal Mesh data to be extracted as FBX.""" import os -from contextlib import contextmanager from maya import cmds # noqa diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py index edbb5f845e..6292afcf41 100644 --- 
a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py @@ -74,7 +74,7 @@ class ExtractUnrealSkeletalMeshFbx(publish.Extractor): renamed_to_extract.append("|".join(node_path)) with renamed(original_parent, parent_node): - self.log.debug("Extracting: {}".format(renamed_to_extract, path)) + self.log.debug("Extracting: {}".format(renamed_to_extract)) fbx_exporter.export(renamed_to_extract, path) if "representations" not in instance.data: diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_yeticache.py b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_yeticache.py index 9a264959d1..9a6b4ebaed 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_yeticache.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_yeticache.py @@ -5,13 +5,13 @@ from maya import cmds from ayon_core.pipeline import publish -class ExtractYetiCache(publish.Extractor): +class ExtractUnrealYetiCache(publish.Extractor): """Producing Yeti cache files using scene time range. This will extract Yeti cache file sequence and fur settings. """ - label = "Extract Yeti Cache" + label = "Extract Yeti Cache (Unreal)" hosts = ["maya"] families = ["yeticacheUE"] diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py b/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py index d305b8dc6c..d799486184 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py @@ -7,7 +7,6 @@ from maya import cmds import pyblish.api from ayon_core.hosts.maya.api.lib import extract_alembic from ayon_core.pipeline import publish -from ayon_core.lib import StringTemplate class ExtractWorkfileXgen(publish.Extractor): diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_xgen.py b/client/ayon_core/hosts/maya/plugins/publish/extract_xgen.py index 73668da28d..b672089a63 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_xgen.py @@ -9,7 +9,6 @@ from ayon_core.pipeline import publish from ayon_core.hosts.maya.api.lib import ( maintained_selection, attribute_values, write_xgen_file, delete_after ) -from ayon_core.lib import StringTemplate class ExtractXgen(publish.Extractor): diff --git a/client/ayon_core/hosts/maya/plugins/publish/help/validate_animation_out_set_related_node_ids.xml b/client/ayon_core/hosts/maya/plugins/publish/help/validate_animation_out_set_related_node_ids.xml new file mode 100644 index 0000000000..a855dd90a5 --- /dev/null +++ b/client/ayon_core/hosts/maya/plugins/publish/help/validate_animation_out_set_related_node_ids.xml @@ -0,0 +1,29 @@ + + + +Shape IDs mismatch original shape +## Shapes mismatch IDs with original shape + +Meshes are detected where the (deformed) mesh has a different `cbId` than +the same mesh in its deformation history. +These should normally be the same. + +### How to repair? + +By using the repair action the IDs from the shape in history will be +copied to the deformed shape. For **animation** instances using the +repair action is usually the correct fix. + + + +### How does this happen? + +When a deformer is applied in the scene on a referenced mesh that had no +deformers then Maya will create a new shape node for the mesh that +does not have the original id.
Then on scene save new ids get created for the +meshes lacking a `cbId` and thus the mesh then has a different `cbId` than +the mesh in the deformation history. + + + + diff --git a/client/ayon_core/hosts/maya/plugins/publish/help/validate_mesh_non_manifold.xml b/client/ayon_core/hosts/maya/plugins/publish/help/validate_mesh_non_manifold.xml new file mode 100644 index 0000000000..5aec3009a7 --- /dev/null +++ b/client/ayon_core/hosts/maya/plugins/publish/help/validate_mesh_non_manifold.xml @@ -0,0 +1,33 @@ + + + +Non-Manifold Edges/Vertices +## Non-Manifold Edges/Vertices + +Meshes found with non-manifold edges or vertices. + +### How to repair? + +Run the _select invalid_ action to select the invalid components. + +You can also try the _cleanup matching polygons_ action which will perform a +cleanup like Maya's `Mesh > Cleanup...` modeling tool. + +It is recommended to always select the invalid components first to see where +the issue is, because after running any repair you will need to double-check +that the topology is still as you intended. + + + +### What is non-manifold topology? + +_Non-manifold topology_ polygons have a configuration that cannot be unfolded +into a continuous flat piece, for example: + +- Three or more faces share an edge +- Two or more faces share a single vertex but no edge +- Adjacent faces have opposite normals + + + + diff --git a/client/ayon_core/hosts/maya/plugins/publish/help/validate_rig_out_set_node_ids.xml b/client/ayon_core/hosts/maya/plugins/publish/help/validate_rig_out_set_node_ids.xml new file mode 100644 index 0000000000..374b8e59ae --- /dev/null +++ b/client/ayon_core/hosts/maya/plugins/publish/help/validate_rig_out_set_node_ids.xml @@ -0,0 +1,32 @@ + + + +Shape IDs mismatch original shape +## Shapes mismatch IDs with original shape + +Meshes are detected in the **rig** where the (deformed) mesh has a different +`cbId` than the same mesh in its deformation history. +These should normally be the same. + +### How to repair? + +By using the repair action the IDs from the shape in history will be +copied to the deformed shape. For rig instances, in many cases the +correct fix is to use the repair action **unless** you explicitly tried +to update the `cbId` values on the meshes - in that case you actually want +to do the reverse and copy the IDs from the deformed mesh to the history +mesh instead. + + + +### How does this happen? + +When a deformer is applied in the scene on a referenced mesh that had no +deformers then Maya will create a new shape node for the mesh that +does not have the original id. Then on scene save new ids get created for the +meshes lacking a `cbId` and thus the mesh then has a different `cbId` than +the mesh in the deformation history.
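The repair described in these help files boils down to copying the `cbId` from the shape found in the deformation history onto the deformed shape. The actual action relies on the `lib.get_id`/`lib.get_id_from_sibling` helpers used elsewhere in this PR; the sketch below is only a simplified stand-in to make the idea concrete.

```python
# Simplified stand-in for the repair described above: copy the `cbId` from a
# shape in the deformation history onto the deformed shape. The production
# action uses lib.get_id / lib.get_id_from_sibling instead of raw cmds calls.
from maya import cmds


def copy_id_from_history(shape):
    """Copy the cbId of the first history mesh that has one onto `shape`."""
    shape = cmds.ls(shape, long=True)[0]
    history = cmds.listHistory(shape) or []
    for sibling in cmds.ls(history, type="mesh", long=True):
        if sibling == shape:
            continue
        if not cmds.attributeQuery("cbId", node=sibling, exists=True):
            continue
        history_id = cmds.getAttr(sibling + ".cbId")
        if not history_id:
            continue
        if not cmds.attributeQuery("cbId", node=shape, exists=True):
            cmds.addAttr(shape, longName="cbId", dataType="string")
        cmds.setAttr(shape + ".cbId", history_id, type="string")
        return history_id
```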
+ + + + diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/client/ayon_core/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py index 2502fd74b2..7ecd602662 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py @@ -6,7 +6,7 @@ from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, - PublishValidationError, + PublishXmlValidationError, OptionalPyblishPluginMixin, get_plugin_settings, apply_plugin_settings_automatically @@ -56,40 +56,39 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin, # if a deformer has been created on the shape invalid = self.get_invalid(instance) if invalid: - # TODO: Message formatting can be improved - raise PublishValidationError("Nodes found with mismatching " - "IDs: {0}".format(invalid), - title="Invalid node ids") + + # Use the short names + invalid = cmds.ls(invalid) + invalid.sort() + + # Construct a human-readable list + invalid = "\n".join("- {}".format(node) for node in invalid) + + raise PublishXmlValidationError( + plugin=self, + message=( + "Nodes have different IDs than their input " + "history: \n{0}".format(invalid) + ) + ) @classmethod def get_invalid(cls, instance): """Get all nodes which do not match the criteria""" invalid = [] - types_to_skip = ["locator"] + types = ["mesh", "nurbsCurve", "nurbsSurface"] # get asset id nodes = instance.data.get("out_hierarchy", instance[:]) - for node in nodes: + for node in cmds.ls(nodes, type=types, long=True): # We only check when the node is *not* referenced if cmds.referenceQuery(node, isNodeReferenced=True): continue - # Check if node is a shape as deformers only work on shapes - obj_type = cmds.objectType(node, isAType="shape") - if not obj_type: - continue - - # Skip specific types - if cmds.objectType(node) in types_to_skip: - continue - # Get the current id of the node node_id = lib.get_id(node) - if not node_id: - invalid.append(node) - continue history_id = lib.get_id_from_sibling(node) if history_id is not None and node_id != history_id: diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_color_sets.py b/client/ayon_core/hosts/maya/plugins/publish/validate_color_sets.py index e69717fad0..f70b46f89e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_color_sets.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_color_sets.py @@ -10,7 +10,7 @@ from ayon_core.pipeline.publish import ( ) -class ValidateColorSets(pyblish.api.Validator, +class ValidateColorSets(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate all meshes in the instance have unlocked normals diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_subset.py b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_subset.py index da3a194e58..df9ca0bf13 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_subset.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_subset.py @@ -36,18 +36,18 @@ class ValidateSubsetName(pyblish.api.InstancePlugin): ) if not isinstance(product_name, six.string_types): - raise TypeError(( + raise PublishValidationError(( "Instance product name must be string, got: {0} ({1})" ).format(product_name, type(product_name))) # Ensure is not empty product if not product_name: - raise ValueError( + raise 
PublishValidationError( "Instance product name is empty: {0}".format(product_name) ) # Validate product characters if not validate_name(product_name): - raise ValueError(( + raise PublishValidationError(( "Instance product name contains invalid characters: {0}" ).format(product_name)) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py b/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py index e70a805de4..070974aef5 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py @@ -47,10 +47,18 @@ class ValidateShadingEngine(pyblish.api.InstancePlugin, shape, destination=True, type="shadingEngine" ) or [] for shading_engine in shading_engines: - name = ( - cmds.listConnections(shading_engine + ".surfaceShader")[0] - + "SG" + materials = cmds.listConnections( + shading_engine + ".surfaceShader", + source=True, destination=False ) + if not materials: + cls.log.warning( + "Shading engine '{}' has no material connected to its " + ".surfaceShader attribute.".format(shading_engine)) + continue + + material = materials[0] # there should only ever be one input + name = material + "SG" if shading_engine != name: invalid.append(shading_engine) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py b/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py index f1c171bddc..47314b64ac 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py @@ -3,7 +3,6 @@ import maya.cmds as cmds import pyblish.api import ayon_core.hosts.maya.api.lib as mayalib -from ayon_core.pipeline.context_tools import get_current_project_folder from ayon_core.pipeline.publish import ( RepairContextAction, ValidateSceneOrder, @@ -131,6 +130,5 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin, cls.log.debug(current_linear) cls.log.info("Setting time unit to match project") - # TODO replace query with using 'context.data["folderEntity"]' - folder_entity = get_current_project_folder() + folder_entity = context.data["folderEntity"] mayalib.set_scene_fps(folder_entity["attrib"]["fps"]) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_empty.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_empty.py index 934cbae327..c95e1ec816 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_empty.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_empty.py @@ -51,5 +51,5 @@ class ValidateMeshEmpty(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( - "Meshes found in instance without any vertices: %s" % invalid + "Meshes found without any vertices: %s" % invalid ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py index 8f80b689fd..bfb4257f23 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py @@ -2,7 +2,11 @@ from maya import cmds import pyblish.api import ayon_core.hosts.maya.api.action -from ayon_core.pipeline.publish import ValidateMeshOrder, OptionalPyblishPluginMixin +from ayon_core.pipeline.publish import ( + ValidateMeshOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) class 
ValidateMeshLaminaFaces(pyblish.api.InstancePlugin, @@ -20,6 +24,16 @@ class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin, actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] optional = True + description = ( + "## Meshes with Lamina Faces\n" + "Detected meshes with lamina faces. Lamina faces are faces " + "that share all of their edges and thus are merged together on top of " + "each other.\n\n" + "### How to repair?\n" + "You can repair them by using Maya's modeling tool `Mesh > Cleanup..` " + "and select to cleanup matching polygons for lamina faces." + ) + @staticmethod def get_invalid(instance): meshes = cmds.ls(instance, type='mesh', long=True) @@ -36,5 +50,6 @@ class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin, invalid = self.get_invalid(instance) if invalid: - raise ValueError("Meshes found with lamina faces: " - "{0}".format(invalid)) + raise PublishValidationError( + "Meshes found with lamina faces: {0}".format(invalid), + description=self.description) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py index 5f107b7f7e..d1d7e49fa4 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py @@ -5,11 +5,12 @@ import ayon_core.hosts.maya.api.action from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( ValidateContentsOrder, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, + PublishValidationError ) -class ValidateMeshNgons(pyblish.api.Validator, +class ValidateMeshNgons(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure that meshes don't have ngons @@ -27,6 +28,15 @@ class ValidateMeshNgons(pyblish.api.Validator, actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] optional = True + description = ( + "## Meshes with NGON Faces\n" + "Detected meshes with NGON faces. **NGONS** are faces " + "with more than four sides.\n\n" + "### How to repair?\n" + "You can repair them by using Maya's modeling tool `Mesh > Cleanup..` " + "and select to cleanup matching polygons for n-gons." + ) + + @staticmethod def get_invalid(instance): @@ -49,5 +59,6 @@ class ValidateMeshNgons(pyblish.api.Validator, invalid = self.get_invalid(instance) if invalid: - raise ValueError("Meshes found with n-gons" - "values: {0}".format(invalid)) + raise PublishValidationError( + "Meshes found with n-gons: {0}".format(invalid), + description=self.description) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py index ff1dca87cf..bf1489f92e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py @@ -16,7 +16,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateMeshNoNegativeScale(pyblish.api.Validator, +class ValidateMeshNoNegativeScale(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure that meshes don't have a negative scale.
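Several validators in this PR follow the same conversion: `pyblish.api.Validator` becomes `pyblish.api.InstancePlugin` with `OptionalPyblishPluginMixin`, and plain `ValueError`s become `PublishValidationError`s that carry a markdown description. A minimal sketch of that target shape is shown below; the product family and the "empty transforms" check are invented purely for illustration.

```python
# Minimal sketch of the validator pattern these changes converge on:
# InstancePlugin + OptionalPyblishPluginMixin, a get_invalid() classmethod and
# a PublishValidationError carrying a markdown description. The family and the
# "empty transforms" check are made up for illustration only.
import pyblish.api
from maya import cmds

from ayon_core.pipeline.publish import (
    ValidateContentsOrder,
    OptionalPyblishPluginMixin,
    PublishValidationError,
)


class ValidateNoEmptyTransforms(pyblish.api.InstancePlugin,
                                OptionalPyblishPluginMixin):
    """Example validator: disallow transforms without any children."""

    order = ValidateContentsOrder
    hosts = ["maya"]
    families = ["model"]
    label = "No Empty Transforms (example)"
    optional = True

    description = (
        "## Empty transforms\n"
        "Transforms were found that do not contain any children.\n\n"
        "### How to repair?\n"
        "Delete the empty transforms or parent geometry under them."
    )

    @classmethod
    def get_invalid(cls, instance):
        transforms = cmds.ls(instance, type="transform", long=True)
        return [node for node in transforms
                if not cmds.listRelatives(node, children=True)]

    def process(self, instance):
        if not self.is_active(instance.data):
            return
        invalid = self.get_invalid(instance)
        if invalid:
            raise PublishValidationError(
                "Empty transforms found: {}".format(invalid),
                description=self.description)
```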
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_non_manifold.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_non_manifold.py index 6dbad538ef..958707e4f4 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_non_manifold.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_non_manifold.py @@ -1,14 +1,99 @@ -from maya import cmds +from maya import cmds, mel import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( ValidateMeshOrder, - PublishValidationError, + PublishXmlValidationError, + RepairAction, OptionalPyblishPluginMixin ) +def poly_cleanup(version=4, + meshes=None, + # Version 1 + all_meshes=False, + select_only=False, + history_on=True, + quads=False, + nsided=False, + concave=False, + holed=False, + nonplanar=False, + zeroGeom=False, + zeroGeomTolerance=1e-05, + zeroEdge=False, + zeroEdgeTolerance=1e-05, + zeroMap=False, + zeroMapTolerance=1e-05, + # Version 2 + shared_uvs=False, + non_manifold=False, + # Version 3 + lamina=False, + # Version 4 + invalid_components=False): + """Wrapper around `polyCleanupArgList` mel command""" + + # Get all inputs named as `dict` to easily do conversions and formatting + values = locals() + + # Convert booleans to 1 or 0 + for key in [ + "all_meshes", + "select_only", + "history_on", + "quads", + "nsided", + "concave", + "holed", + "nonplanar", + "zeroGeom", + "zeroEdge", + "zeroMap", + "shared_uvs", + "non_manifold", + "lamina", + "invalid_components", + ]: + values[key] = 1 if values[key] else 0 + + cmd = ( + 'polyCleanupArgList {version} {{ ' + '"{all_meshes}",' # 0: All selectable meshes + '"{select_only}",' # 1: Only perform a selection + '"{history_on}",' # 2: Keep construction history + '"{quads}",' # 3: Check for quads polys + '"{nsided}",' # 4: Check for n-sides polys + '"{concave}",' # 5: Check for concave polys + '"{holed}",' # 6: Check for holed polys + '"{nonplanar}",' # 7: Check for non-planar polys + '"{zeroGeom}",' # 8: Check for 0 area faces + '"{zeroGeomTolerance}",' # 9: Tolerance for face areas + '"{zeroEdge}",' # 10: Check for 0 length edges + '"{zeroEdgeTolerance}",' # 11: Tolerance for edge length + '"{zeroMap}",' # 12: Check for 0 uv face area + '"{zeroMapTolerance}",' # 13: Tolerance for uv face areas + '"{shared_uvs}",' # 14: Unshare uvs that are shared + # across vertices + '"{non_manifold}",' # 15: Check for nonmanifold polys + '"{lamina}",' # 16: Check for lamina polys + '"{invalid_components}"' # 17: Remove invalid components + ' }};'.format(**values) + ) + + mel.eval("source polyCleanupArgList") + if not all_meshes and meshes: + # Allow to specify meshes to run over by selecting them + cmds.select(meshes, replace=True) + mel.eval(cmd) + + +class CleanupMatchingPolygons(RepairAction): + label = "Cleanup matching polygons" + + def _as_report_list(values, prefix="- ", suffix="\n"): """Return list as bullet point list for a report""" if not values: @@ -16,7 +101,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateMeshNonManifold(pyblish.api.Validator, +class ValidateMeshNonManifold(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure that meshes don't have non-manifold edges or vertices @@ -29,7 +114,8 @@ class ValidateMeshNonManifold(pyblish.api.Validator, hosts = ['maya'] families = ['model'] label = 'Mesh Non-Manifold Edges/Vertices' - actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + actions = 
[ayon_core.hosts.maya.api.action.SelectInvalidAction, + CleanupMatchingPolygons] optional = True @staticmethod @@ -39,9 +125,11 @@ class ValidateMeshNonManifold(pyblish.api.Validator, invalid = [] for mesh in meshes: - if (cmds.polyInfo(mesh, nonManifoldVertices=True) or - cmds.polyInfo(mesh, nonManifoldEdges=True)): - invalid.append(mesh) + components = cmds.polyInfo(mesh, + nonManifoldVertices=True, + nonManifoldEdges=True) + if components: + invalid.extend(components) return invalid @@ -49,12 +137,34 @@ class ValidateMeshNonManifold(pyblish.api.Validator, """Process all the nodes in the instance 'objectSet'""" if not self.is_active(instance.data): return + invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError( - "Meshes found with non-manifold edges/vertices:\n\n{0}".format( - _as_report_list(sorted(invalid)) - ), - title="Non-Manifold Edges/Vertices" + # Report only the meshes instead of all component indices + invalid_meshes = { + component.split(".", 1)[0] for component in invalid + } + invalid_meshes = _as_report_list(sorted(invalid_meshes)) + + raise PublishXmlValidationError( + plugin=self, + message=( + "Meshes found with non-manifold " + "edges/vertices:\n\n{0}".format(invalid_meshes) + ) ) + + @classmethod + def repair(cls, instance): + invalid_components = cls.get_invalid(instance) + if not invalid_components: + cls.log.info("No invalid components found to cleanup.") + return + + invalid_meshes = { + component.split(".", 1)[0] for component in invalid_components + } + poly_cleanup(meshes=list(invalid_meshes), + select_only=True, + non_manifold=True) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py index 1790a94580..76b716d01f 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py @@ -18,7 +18,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateMeshNormalsUnlocked(pyblish.api.Validator, +class ValidateMeshNormalsUnlocked(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate all meshes in the instance have unlocked normals diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_shader_connections.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_shader_connections.py index 8672ac13dd..70ede83f2d 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_shader_connections.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_shader_connections.py @@ -107,8 +107,9 @@ class ValidateMeshShaderConnections(pyblish.api.InstancePlugin, invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError("Shapes found with invalid shader " - "connections: {0}".format(invalid)) + raise PublishValidationError( + "Shapes found with invalid shader connections: " + "{0}".format(invalid)) @staticmethod def get_invalid(instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py index 8dbd0ca264..21697cd903 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py @@ -6,7 +6,8 @@ from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( 
RepairAction, ValidateMeshOrder, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, + PublishValidationError ) @@ -66,7 +67,7 @@ class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin, if allowed: self.log.warning(message) else: - raise ValueError(message) + raise PublishValidationError(message) @classmethod def repair(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py index c7f405b0cf..a139b65169 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py @@ -5,7 +5,8 @@ import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( RepairAction, ValidateMeshOrder, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, + PublishValidationError ) @@ -55,8 +56,8 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, invalid = self.get_invalid(instance) if invalid: - raise ValueError("Meshes found without 'map1' " - "UV set: {0}".format(invalid)) + raise PublishValidationError( + "Meshes found without 'map1' UV set: {0}".format(invalid)) @classmethod def repair(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_no_animation.py b/client/ayon_core/hosts/maya/plugins/publish/validate_no_animation.py index 6e0719628f..bf45c0e974 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_no_animation.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_no_animation.py @@ -16,7 +16,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateNoAnimation(pyblish.api.Validator, +class ValidateNoAnimation(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure no keyframes on nodes in the Instance. diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_no_namespace.py b/client/ayon_core/hosts/maya/plugins/publish/validate_no_namespace.py index 7ea2a79339..f546caff2c 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_no_namespace.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_no_namespace.py @@ -49,11 +49,17 @@ class ValidateNoNamespace(pyblish.api.InstancePlugin, invalid = self.get_invalid(instance) if invalid: + invalid_namespaces = {get_namespace(node) for node in invalid} raise PublishValidationError( - "Namespaces found:\n\n{0}".format( - _as_report_list(sorted(invalid)) + message="Namespaces found:\n\n{0}".format( + _as_report_list(sorted(invalid_namespaces)) ), - title="Namespaces in model" + title="Namespaces in model", + description=( + "## Namespaces found in model\n" + "It is not allowed to publish a model that contains " + "namespaces." 
+ ) ) @classmethod diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_no_null_transforms.py b/client/ayon_core/hosts/maya/plugins/publish/validate_no_null_transforms.py index a9dc1d5bef..38955fd777 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_no_null_transforms.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_no_null_transforms.py @@ -19,22 +19,17 @@ def _as_report_list(values, prefix="- ", suffix="\n"): def has_shape_children(node): # Check if any descendants - allDescendents = cmds.listRelatives(node, - allDescendents=True, - fullPath=True) - if not allDescendents: + all_descendents = cmds.listRelatives(node, + allDescendents=True, + fullPath=True) + if not all_descendents: return False # Check if there are any shapes at all - shapes = cmds.ls(allDescendents, shapes=True) + shapes = cmds.ls(all_descendents, shapes=True, noIntermediate=True) if not shapes: return False - # Check if all descendent shapes are intermediateObjects; - # if so we consider this node a null node and return False. - if all(cmds.getAttr('{0}.intermediateObject'.format(x)) for x in shapes): - return False - return True diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_no_ghosting.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_no_ghosting.py index 73701f8d83..10cbbc9a88 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_no_ghosting.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_no_ghosting.py @@ -5,10 +5,12 @@ import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( ValidateContentsOrder, - OptionalPyblishPluginMixin - + OptionalPyblishPluginMixin, + PublishValidationError ) -class ValidateNodeNoGhosting(pyblish.api.InstancePlugin. + + +class ValidateNodeNoGhosting(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure nodes do not have ghosting enabled. @@ -55,5 +57,5 @@ class ValidateNodeNoGhosting(pyblish.api.InstancePlugin. invalid = self.get_invalid(instance) if invalid: - raise ValueError("Nodes with ghosting enabled found: " - "{0}".format(invalid)) + raise PublishValidationError( + "Nodes with ghosting enabled found: {0}".format(invalid)) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py b/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py index 0171318813..e186d74b89 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py @@ -1,4 +1,5 @@ import re +import inspect import pyblish.api from maya import cmds @@ -36,7 +37,10 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin, return invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError("Invalid cameras for render.") + raise PublishValidationError( + "Invalid render cameras.", + description=self.get_description() + ) @classmethod def get_invalid(cls, instance): @@ -51,17 +55,30 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin, RenderSettings.get_image_prefix_attr(renderer) ) - + renderlayer = instance.data["renderlayer"] if len(cameras) > 1: if re.search(cls.R_CAMERA_TOKEN, file_prefix): # if there is token in prefix and we have more then # 1 camera, all is ok. 
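Stepping back to the `poly_cleanup` helper added in `validate_mesh_non_manifold.py` above: it is a thin formatter around the eighteen positional flags that `polyCleanupArgList` expects, and the new `CleanupMatchingPolygons` repair action drives it in select-only mode. A short usage sketch, assuming `poly_cleanup` has been defined as in that hunk (for example pasted into Maya's Script Editor) and that the scene contains some meshes; the `*_GEO` name pattern is only a placeholder:

```python
from maya import cmds

# Placeholder selection; any list of mesh transforms or shapes works.
meshes = cmds.ls("*_GEO", type="transform", long=True)

# Select, but do not modify, non-manifold components on those meshes --
# the same call the new CleanupMatchingPolygons repair action makes.
poly_cleanup(meshes=meshes, select_only=True, non_manifold=True)

# The matched non-manifold vertices/edges end up in the active selection.
print(cmds.ls(selection=True))
```

Note that the repair action deliberately passes `select_only=True`, so it highlights the offending components rather than letting `polyCleanup` alter the geometry automatically.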
return - cls.log.error("Multiple renderable cameras found for %s: %s " % - (instance.data["setMembers"], cameras)) - return [instance.data["setMembers"]] + cameras + cls.log.error( + "Multiple renderable cameras found for %s: %s ", + renderlayer, ", ".join(cameras)) + return [renderlayer] + cameras elif len(cameras) < 1: - cls.log.error("No renderable cameras found for %s " % - instance.data["setMembers"]) - return [instance.data["setMembers"]] + cls.log.error("No renderable cameras found for %s ", renderlayer) + return [renderlayer] + + def get_description(self): + return inspect.cleandoc( + """### Render Cameras Invalid + + Your render cameras are misconfigured. You may have no render + camera set or have multiple cameras with a render filename + prefix that does not include the `<camera>` token. + + See the logs for more details about the cameras. + + """ + ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index c55953df7a..d94ddc5f2a 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -7,7 +7,7 @@ from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, - PublishValidationError, + PublishXmlValidationError, OptionalPyblishPluginMixin, get_plugin_settings, apply_plugin_settings_automatically @@ -58,8 +58,20 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin, # if a deformer has been created on the shape invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError( - "Nodes found with mismatching IDs: {0}".format(invalid) + + # Use the short names + invalid = cmds.ls(invalid) + invalid.sort() + + # Construct a human-readable list + invalid = "\n".join("- {}".format(node) for node in invalid) + + raise PublishXmlValidationError( + plugin=ValidateRigOutSetNodeIds, + message=( + "Rig nodes have different IDs than their input " + "history: \n{0}".format(invalid) + ) ) @classmethod diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_setdress_root.py b/client/ayon_core/hosts/maya/plugins/publish/validate_setdress_root.py index 906f6fbd1a..f88e33fdfb 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_setdress_root.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_setdress_root.py @@ -1,5 +1,8 @@ import pyblish.api -from ayon_core.pipeline.publish import ValidateContentsOrder +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError +) class ValidateSetdressRoot(pyblish.api.InstancePlugin): @@ -20,4 +23,6 @@ class ValidateSetdressRoot(pyblish.api.InstancePlugin): root = cmds.ls(set_member, assemblies=True, long=True) if not root or root[0] not in set_member: - raise Exception("Setdress top root node is not being published.") + raise PublishValidationError( + "Setdress top root node is not being published." 
+ ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_default_names.py b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_default_names.py index 2f0811a73e..c4c4c909d3 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_default_names.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_default_names.py @@ -8,7 +8,8 @@ import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( ValidateContentsOrder, RepairAction, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, + PublishValidationError ) @@ -84,8 +85,8 @@ class ValidateShapeDefaultNames(pyblish.api.InstancePlugin, invalid = self.get_invalid(instance) if invalid: - raise ValueError("Incorrectly named shapes " - "found: {0}".format(invalid)) + raise PublishValidationError( + "Incorrectly named shapes found: {0}".format(invalid)) @classmethod def repair(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_render_stats.py b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_render_stats.py index 2783a6dbe8..52ce3c5436 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_render_stats.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_render_stats.py @@ -6,11 +6,12 @@ import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( RepairAction, ValidateMeshOrder, + PublishValidationError, OptionalPyblishPluginMixin ) -class ValidateShapeRenderStats(pyblish.api.Validator, +class ValidateShapeRenderStats(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure all render stats are set to the default values.""" @@ -20,7 +21,6 @@ class ValidateShapeRenderStats(pyblish.api.Validator, label = 'Shape Default Render Stats' actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction, RepairAction] - optional = True defaults = {'castsShadows': 1, 'receiveShadows': 1, @@ -37,14 +37,13 @@ class ValidateShapeRenderStats(pyblish.api.Validator, # It seems the "surfaceShape" and those derived from it have # `renderStat` attributes. shapes = cmds.ls(instance, long=True, type='surfaceShape') - invalid = [] + invalid = set() for shape in shapes: - _iteritems = getattr(cls.defaults, "iteritems", cls.defaults.items) - for attr, default_value in _iteritems(): + for attr, default_value in cls.defaults.items(): if cmds.attributeQuery(attr, node=shape, exists=True): value = cmds.getAttr('{}.{}'.format(shape, attr)) if value != default_value: - invalid.append(shape) + invalid.add(shape) return invalid @@ -52,17 +51,36 @@ class ValidateShapeRenderStats(pyblish.api.Validator, if not self.is_active(instance.data): return invalid = self.get_invalid(instance) + if not invalid: + return - if invalid: - raise ValueError("Shapes with non-default renderStats " - "found: {0}".format(invalid)) + defaults_str = "\n".join( + "- {}: {}\n".format(key, value) + for key, value in self.defaults.items() + ) + description = ( + "## Shape Default Render Stats\n" + "Shapes are detected with non-default render stats.\n\n" + "To ensure a model's shapes behave like a shape would by default " + "we require the render stats to have not been altered in " + "the published models.\n\n" + "### How to repair?\n" + "You can reset the default values on the shapes by using the " + "repair action." 
+ ) + + raise PublishValidationError( + "Shapes with non-default renderStats " + "found: {0}".format(", ".join(sorted(invalid))), + description=description, + detail="The expected default values " + "are:\n\n{}".format(defaults_str) + ) @classmethod def repair(cls, instance): for shape in cls.get_invalid(instance): - _iteritems = getattr(cls.defaults, "iteritems", cls.defaults.items) - for attr, default_value in _iteritems(): - + for attr, default_value in cls.defaults.items(): if cmds.attributeQuery(attr, node=shape, exists=True): plug = '{0}.{1}'.format(shape, attr) value = cmds.getAttr(plug) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_zero.py b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_zero.py index 4f4826776c..6c89258085 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_zero.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_zero.py @@ -12,7 +12,7 @@ from ayon_core.pipeline.publish import ( ) -class ValidateShapeZero(pyblish.api.Validator, +class ValidateShapeZero(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Shape components may not have any "tweak" values diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_single_assembly.py b/client/ayon_core/hosts/maya/plugins/publish/validate_single_assembly.py index 1987f93e32..f5d73553d3 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_single_assembly.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_single_assembly.py @@ -1,5 +1,8 @@ import pyblish.api -from ayon_core.pipeline.publish import ValidateContentsOrder +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError +) class ValidateSingleAssembly(pyblish.api.InstancePlugin): @@ -30,7 +33,11 @@ class ValidateSingleAssembly(pyblish.api.InstancePlugin): # ensure unique (somehow `maya.cmds.ls` doesn't manage that) assemblies = set(assemblies) - assert len(assemblies) > 0, ( - "One assembly required for: %s (currently empty?)" % instance) - assert len(assemblies) < 2, ( - 'Multiple assemblies found: %s' % assemblies) + if len(assemblies) == 0: + raise PublishValidationError( + "One assembly required for: %s (currently empty?)" % instance + ) + elif len(assemblies) > 1: + raise PublishValidationError( + 'Multiple assemblies found: %s' % assemblies + ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py b/client/ayon_core/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py index 48d8e63553..a548e12f33 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py @@ -3,7 +3,11 @@ from maya import cmds import pyblish.api import ayon_core.hosts.maya.api.action -from ayon_core.pipeline.publish import ValidateContentsOrder,OptionalPyblishPluginMixin +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) class ValidateSkinclusterDeformerSet(pyblish.api.InstancePlugin, @@ -30,8 +34,10 @@ class ValidateSkinclusterDeformerSet(pyblish.api.InstancePlugin, invalid = self.get_invalid(instance) if invalid: - raise ValueError("Invalid skinCluster relationships " - "found on meshes: {0}".format(invalid)) + raise PublishValidationError( + "Invalid skinCluster relationships found on meshes: {0}" + .format(invalid) + ) @classmethod def get_invalid(cls, instance): diff --git 
a/client/ayon_core/hosts/maya/plugins/publish/validate_step_size.py b/client/ayon_core/hosts/maya/plugins/publish/validate_step_size.py index a3419a83a9..a276a5b644 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_step_size.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_step_size.py @@ -29,7 +29,7 @@ class ValidateStepSize(pyblish.api.InstancePlugin, @classmethod def get_invalid(cls, instance): - objset = instance.data['name'] + objset = instance.data['instance_node'] step = instance.data.get("step", 1.0) if step < cls.MIN or step > cls.MAX: @@ -47,4 +47,4 @@ class ValidateStepSize(pyblish.api.InstancePlugin, invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( - "Invalid instances found: {0}".format(invalid)) + "Instance found with invalid step size: {0}".format(invalid)) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_transform_zero.py b/client/ayon_core/hosts/maya/plugins/publish/validate_transform_zero.py index 1cbdd05b0b..cd96ebb10d 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_transform_zero.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_transform_zero.py @@ -1,5 +1,6 @@ -from maya import cmds +import inspect +from maya import cmds import pyblish.api import ayon_core.hosts.maya.api.action @@ -10,7 +11,7 @@ from ayon_core.pipeline.publish import ( ) -class ValidateTransformZero(pyblish.api.Validator, +class ValidateTransformZero(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Transforms can't have any values @@ -57,7 +58,7 @@ class ValidateTransformZero(pyblish.api.Validator, if ('_LOC' in transform) or ('_loc' in transform): continue mat = cmds.xform(transform, q=1, matrix=True, objectSpace=True) - if not all(abs(x-y) < cls._tolerance + if not all(abs(x - y) < cls._tolerance for x, y in zip(cls._identity, mat)): invalid.append(transform) @@ -69,14 +70,24 @@ class ValidateTransformZero(pyblish.api.Validator, return invalid = self.get_invalid(instance) if invalid: - names = "
".join( " - {}".format(node) for node in invalid ) raise PublishValidationError( title="Transform Zero", + description=self.get_description(), message="The model publish allows no transformations. You must" " freeze transformations to continue.

" - "Nodes found with transform values: " + "Nodes found with transform values:
" "{0}".format(names)) + + @staticmethod + def get_description(): + return inspect.cleandoc("""### Transform can't have any values + + The model publish allows no transformations. + + You must **freeze transformations** to continue. + + """) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_unique_names.py b/client/ayon_core/hosts/maya/plugins/publish/validate_unique_names.py index 8ec704ddd1..0066d70531 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_unique_names.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_unique_names.py @@ -4,10 +4,12 @@ import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( ValidateContentsOrder, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, + PublishValidationError ) -class ValidateUniqueNames(pyblish.api.Validator, + +class ValidateUniqueNames(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """transform names should be unique @@ -40,5 +42,5 @@ class ValidateUniqueNames(pyblish.api.Validator, return invalid = self.get_invalid(instance) if invalid: - raise ValueError("Nodes found with none unique names. " - "values: {0}".format(invalid)) + raise PublishValidationError( + "Nodes found with non-unique names:\n{0}".format(invalid)) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py b/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py index 101bd5bf04..6440c00eae 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py @@ -5,7 +5,8 @@ import pyblish.api from ayon_core.pipeline.publish import ( ValidateMeshOrder, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, + PublishValidationError ) import ayon_core.hosts.maya.api.action @@ -26,8 +27,8 @@ class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin, invalid = [] meshes = cmds.ls(instance, type="mesh", long=True) for mesh in meshes: - faces = cmds.polyEvaluate(mesh, f=True) - tris = cmds.polyEvaluate(mesh, t=True) + faces = cmds.polyEvaluate(mesh, face=True) + tris = cmds.polyEvaluate(mesh, triangle=True) if faces != tris: invalid.append(mesh) @@ -37,5 +38,5 @@ class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin, if not self.is_active(instance.data): return invalid = self.get_invalid(instance) - assert len(invalid) == 0, ( - "Found meshes without triangles") + if invalid: + raise PublishValidationError("Found meshes without triangles") diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_up_axis.py b/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_up_axis.py index ef7296e628..f7acd41cea 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_up_axis.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_up_axis.py @@ -6,7 +6,8 @@ import pyblish.api from ayon_core.pipeline.publish import ( ValidateContentsOrder, RepairAction, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, + PublishValidationError ) @@ -26,9 +27,10 @@ class ValidateUnrealUpAxis(pyblish.api.ContextPlugin, if not self.is_active(context.data): return - assert cmds.upAxis(q=True, axis=True) == "z", ( - "Invalid axis set as up axis" - ) + if cmds.upAxis(q=True, axis=True) != "z": + raise PublishValidationError( + "Invalid axis set as up axis" + ) @classmethod def repair(cls, instance): diff --git 
a/client/ayon_core/hosts/maya/plugins/publish/validate_visible_only.py b/client/ayon_core/hosts/maya/plugins/publish/validate_visible_only.py index af6c9a64c6..1fdb476dba 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_visible_only.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_visible_only.py @@ -34,8 +34,9 @@ class ValidateAlembicVisibleOnly(pyblish.api.InstancePlugin, invalid = self.get_invalid(instance) if invalid: start, end = self.get_frame_range(instance) - raise PublishValidationError("No visible nodes found in " - "frame range {}-{}.".format(start, end)) + raise PublishValidationError( + f"No visible nodes found in frame range {start}-{end}." + ) @classmethod def get_invalid(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py b/client/ayon_core/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py index b35508d635..b3978b8483 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py @@ -3,6 +3,7 @@ from maya import cmds from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( + KnownPublishError, PublishValidationError, RepairAction, ValidateContentsOrder, @@ -35,11 +36,14 @@ class ValidateVRayDistributedRendering(pyblish.api.InstancePlugin, if not self.is_active(instance.data): return if instance.data.get("renderer") != "vray": - # If not V-Ray ignore.. + # If not V-Ray, ignore return vray_settings = cmds.ls("vraySettings", type="VRaySettingsNode") - assert vray_settings, "Please ensure a VRay Settings Node is present" + if not vray_settings: + raise KnownPublishError( + "Please ensure a VRay Settings Node is present" + ) renderlayer = instance.data['renderlayer'] @@ -51,8 +55,8 @@ class ValidateVRayDistributedRendering(pyblish.api.InstancePlugin, # during batch mode we invalidate the instance if not lib.get_attr_in_layer(self.ignored_attr, layer=renderlayer): raise PublishValidationError( - ("Renderlayer has distributed rendering enabled " - "but is not set to ignore in batch mode.")) + "Renderlayer has distributed rendering enabled " + "but is not set to ignore in batch mode.") @classmethod def repair(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py b/client/ayon_core/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py index 7c480a6bf7..9df5fb8488 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py @@ -6,9 +6,11 @@ from maya import cmds from ayon_core.pipeline.publish import ( RepairContextAction, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, + PublishValidationError ) + class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate whether the V-Ray Render Elements (AOVs) include references. @@ -60,7 +62,7 @@ class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin, self.log.error(( "'Use referenced' not enabled in Vray Render Settings." 
)) - raise AssertionError("Invalid render settings") + raise PublishValidationError("Invalid render settings") @classmethod def repair(cls, context): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_vrayproxy.py b/client/ayon_core/hosts/maya/plugins/publish/validate_vrayproxy.py index 29b8be411c..0288d4b865 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_vrayproxy.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_vrayproxy.py @@ -1,7 +1,10 @@ import pyblish.api -from ayon_core.pipeline import KnownPublishError -from ayon_core.pipeline.publish import OptionalPyblishPluginMixin +from ayon_core.pipeline.publish import ( + OptionalPyblishPluginMixin, + PublishValidationError +) + class ValidateVrayProxy(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): @@ -17,18 +20,18 @@ class ValidateVrayProxy(pyblish.api.InstancePlugin, if not self.is_active(data): return if not data["setMembers"]: - raise KnownPublishError( - "'%s' is empty! This is a bug" % instance.name + raise PublishValidationError( + f"Instance '{instance.name}' is empty." ) if data["animation"]: if data["frameEnd"] < data["frameStart"]: - raise KnownPublishError( + raise PublishValidationError( "End frame is smaller than start frame" ) if not data["vrmesh"] and not data["alembic"]: - raise KnownPublishError( + raise PublishValidationError( "Both vrmesh and alembic are off. Needs at least one to" " publish." ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_xgen.py b/client/ayon_core/hosts/maya/plugins/publish/validate_xgen.py index e2c006be9f..7e0f01c482 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_xgen.py @@ -34,7 +34,7 @@ class ValidateXgen(pyblish.api.InstancePlugin): " Node type found: {}".format(node_type) ) - # Cant have inactive modifiers in collection cause Xgen will try and + # Can't have inactive modifiers in collection cause Xgen will try and # look for them when loading. palette = instance.data["xgmPalette"].replace("|", "") inactive_modifiers = {} diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py index 35b2443718..086cb7b1f5 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py @@ -3,9 +3,11 @@ from maya import cmds import pyblish.api from ayon_core.pipeline.publish import ( ValidateContentsOrder, + PublishValidationError, OptionalPyblishPluginMixin ) + class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Check if the render script callbacks will be used during the rendering @@ -45,8 +47,8 @@ class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin, return invalid = self.get_invalid(instance) if invalid: - raise ValueError("Invalid render callbacks found for '%s'!" 
- % instance.name) + raise PublishValidationError( + f"Invalid render callbacks found for '{instance.name}'.") @classmethod def get_invalid(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py index d81534192a..84614fc0be 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py @@ -1,3 +1,5 @@ +import inspect + import pyblish.api import maya.cmds as cmds import ayon_core.hosts.maya.api.action @@ -8,7 +10,6 @@ from ayon_core.pipeline.publish import ( ) - class ValidateYetiRigCacheState(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate the I/O attributes of the node @@ -32,7 +33,10 @@ class ValidateYetiRigCacheState(pyblish.api.InstancePlugin, return invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError("Nodes have incorrect I/O settings") + raise PublishValidationError( + "Nodes have incorrect I/O settings", + description=inspect.getdoc(self) + ) @classmethod def get_invalid(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py index aa229875fe..77e189e37b 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py @@ -10,7 +10,7 @@ from ayon_core.pipeline.publish import ( ) -class ValidateYetiRigInputShapesInInstance(pyblish.api.Validator, +class ValidateYetiRigInputShapesInInstance(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate if all input nodes are part of the instance's hierarchy""" diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py index 75c82164c2..ad43a24385 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py @@ -49,8 +49,9 @@ def get_selected_nodes(): """Get information from current selection""" selection = cmds.ls(selection=True, long=True) - hierarchy = lib.get_all_children(selection) - return list(set(selection + hierarchy)) + hierarchy = lib.get_all_children(selection, + ignore_intermediate_objects=True) + return list(hierarchy.union(selection)) def get_all_asset_nodes(): diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/models.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/models.py index f5dad25ff0..b0807be6a6 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/models.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/models.py @@ -29,7 +29,8 @@ class AssetModel(models.TreeModel): self.beginResetModel() # Add the items sorted by label - sorter = lambda x: x["label"] + def sorter(x): + return x["label"] for item in sorted(items, key=sorter): diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py index 74cdbeb7d4..88ef4b201a 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py @@ -51,7 +51,7 @@ def assign_vrayproxy_shaders(vrayproxy, assignments): index += 1 -def 
vrayproxy_assign_look(vrayproxy, product_name="lookDefault"): +def vrayproxy_assign_look(vrayproxy, product_name="lookMain"): # type: (str, str) -> None """Assign look to vray proxy. diff --git a/client/ayon_core/hosts/nuke/api/lib.py b/client/ayon_core/hosts/nuke/api/lib.py index 1bb0ff79e0..78cbe85097 100644 --- a/client/ayon_core/hosts/nuke/api/lib.py +++ b/client/ayon_core/hosts/nuke/api/lib.py @@ -389,7 +389,13 @@ def imprint(node, data, tab=None): """ for knob in create_knobs(data, tab): - node.addKnob(knob) + # If knob name exists we set the value. Technically there could be + # multiple knobs with the same name, but the intent is not to have + # duplicated knobs so we do not account for that. + if knob.name() in node.knobs().keys(): + node[knob.name()].setValue(knob.value()) + else: + node.addKnob(knob) @deprecated @@ -814,7 +820,7 @@ def on_script_load(): def check_inventory_versions(): """ - Actual version idetifier of Loaded containers + Actual version identifier of Loaded containers Any time this function is run it will check all nodes and filter only Loader nodes for its version. It will get all versions from database @@ -921,7 +927,7 @@ def writes_version_sync(): for each in nuke.allNodes(filter="Write"): # check if the node is avalon tracked - if _NODE_TAB_NAME not in each.knobs(): + if NODE_TAB_NAME not in each.knobs(): continue avalon_knob_data = read_avalon_data(each) @@ -2381,7 +2387,7 @@ def launch_workfiles_app(): Context.workfiles_launched = True - # get all imortant settings + # get all important settings open_at_start = env_value_to_bool( env_key="AYON_WORKFILE_TOOL_ON_START", default=None) @@ -2621,11 +2627,11 @@ class NukeDirmap(HostDirmap): class DirmapCache: - """Caching class to get settings and sync_module easily and only once.""" + """Caching class to get settings and sitesync easily and only once.""" _project_name = None _project_settings = None - _sync_module_discovered = False - _sync_module = None + _sitesync_addon_discovered = False + _sitesync_addon = None _mapping = None @classmethod @@ -2641,11 +2647,11 @@ class DirmapCache: return cls._project_settings @classmethod - def sync_module(cls): - if not cls._sync_module_discovered: - cls._sync_module_discovered = True - cls._sync_module = AddonsManager().get("sync_server") - return cls._sync_module + def sitesync_addon(cls): + if not cls._sitesync_addon_discovered: + cls._sitesync_addon_discovered = True + cls._sitesync_addon = AddonsManager().get("sitesync") + return cls._sitesync_addon @classmethod def mapping(cls): @@ -2667,7 +2673,7 @@ def dirmap_file_name_filter(file_name): "nuke", DirmapCache.project_name(), DirmapCache.project_settings(), - DirmapCache.sync_module(), + DirmapCache.sitesync_addon(), ) if not DirmapCache.mapping(): DirmapCache.set_mapping(dirmap_processor.get_mappings()) diff --git a/client/ayon_core/hosts/nuke/api/pipeline.py b/client/ayon_core/hosts/nuke/api/pipeline.py index 2255276c56..0d44aba2f9 100644 --- a/client/ayon_core/hosts/nuke/api/pipeline.py +++ b/client/ayon_core/hosts/nuke/api/pipeline.py @@ -30,13 +30,11 @@ from ayon_core.tools.utils import host_tools from ayon_core.hosts.nuke import NUKE_ROOT_DIR from ayon_core.tools.workfile_template_build import open_template_ui -from .command import viewer_update_and_undo_stop from .lib import ( Context, ROOT_DATA_KNOB, INSTANCE_DATA_KNOB, get_main_window, - add_publish_knob, WorkfileSettings, # TODO: remove this once workfile builder will be removed process_workfile_builder, diff --git a/client/ayon_core/hosts/nuke/api/plugin.py 
b/client/ayon_core/hosts/nuke/api/plugin.py index 7f016d9c66..6aa098c558 100644 --- a/client/ayon_core/hosts/nuke/api/plugin.py +++ b/client/ayon_core/hosts/nuke/api/plugin.py @@ -6,7 +6,6 @@ import six import random import string from collections import OrderedDict, defaultdict -from abc import abstractmethod from ayon_core.settings import get_current_project_settings from ayon_core.lib import ( @@ -14,7 +13,6 @@ from ayon_core.lib import ( EnumDef ) from ayon_core.pipeline import ( - LegacyCreator, LoaderPlugin, CreatorError, Creator as NewCreator, @@ -34,18 +32,13 @@ from ayon_core.lib.transcoding import ( from .lib import ( INSTANCE_DATA_KNOB, Knobby, - check_product_name_exists, maintained_selection, get_avalon_knob_data, - set_avalon_knob_data, - add_publish_knob, - get_nuke_imageio_settings, set_node_knobs_from_settings, set_node_data, get_node_data, get_view_process_node, get_viewer_config_from_string, - deprecated, get_filenames_without_hash, link_knobs ) @@ -910,8 +903,8 @@ class ExporterReviewMov(ExporterReview): self._connect_to_above_nodes( node, product_name, "Reposition node... `{}`" ) - # append reformated tag - add_tags.append("reformated") + # append reformatted tag + add_tags.append("reformatted") # only create colorspace baking if toggled on if bake_viewer_process: @@ -1114,7 +1107,7 @@ def convert_to_valid_instaces(): transfer_data["active"] = ( node["publish"].value()) - # add idetifier + # add identifier transfer_data["creator_identifier"] = product_type_to_identifier( product_type ) diff --git a/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py b/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py index 2f6d121af5..afef3ba843 100644 --- a/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py +++ b/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook +from ayon_applications import PreLaunchHook class PrelaunchNukeAssistHook(PreLaunchHook): diff --git a/client/ayon_core/hosts/nuke/plugins/publish/extract_camera.py b/client/ayon_core/hosts/nuke/plugins/publish/extract_camera.py index 1f5a8c73e1..a1a5acb63b 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/extract_camera.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/extract_camera.py @@ -1,6 +1,5 @@ import os import math -from pprint import pformat import nuke diff --git a/client/ayon_core/hosts/nuke/plugins/publish/extract_script_save.py b/client/ayon_core/hosts/nuke/plugins/publish/extract_script_save.py index e44e5686b6..d325684a7c 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/extract_script_save.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/extract_script_save.py @@ -2,10 +2,10 @@ import nuke import pyblish.api -class ExtractScriptSave(pyblish.api.Extractor): +class ExtractScriptSave(pyblish.api.InstancePlugin): """Save current Nuke workfile script""" label = 'Script Save' - order = pyblish.api.Extractor.order - 0.1 + order = pyblish.api.ExtractorOrder - 0.1 hosts = ['nuke'] def process(self, instance): diff --git a/client/ayon_core/hosts/nuke/plugins/publish/validate_knobs.py b/client/ayon_core/hosts/nuke/plugins/publish/validate_knobs.py index 281e172788..8bcde9609d 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/validate_knobs.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/validate_knobs.py @@ -1,3 +1,5 @@ +import json + import nuke import six import pyblish.api diff --git a/client/ayon_core/hosts/nuke/plugins/publish/validate_rendered_frames.py 
b/client/ayon_core/hosts/nuke/plugins/publish/validate_rendered_frames.py index 852267f68c..76ac7e97ad 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -1,6 +1,6 @@ -import os import pyblish.api import clique + from ayon_core.pipeline import PublishXmlValidationError from ayon_core.pipeline.publish import get_errored_instances_from_context diff --git a/client/ayon_core/hosts/photoshop/api/launch_logic.py b/client/ayon_core/hosts/photoshop/api/launch_logic.py index d0823646d7..c388f93044 100644 --- a/client/ayon_core/hosts/photoshop/api/launch_logic.py +++ b/client/ayon_core/hosts/photoshop/api/launch_logic.py @@ -11,7 +11,7 @@ from wsrpc_aiohttp import ( import ayon_api from qtpy import QtCore -from ayon_core.lib import Logger, StringTemplate +from ayon_core.lib import Logger from ayon_core.pipeline import ( registered_host, Anatomy, diff --git a/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py b/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py index 8358c11ca1..70f8fc730f 100644 --- a/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py +++ b/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py @@ -6,10 +6,7 @@ from ayon_core.lib import ( get_ayon_launcher_args, is_using_ayon_console, ) -from ayon_core.lib.applications import ( - PreLaunchHook, - LaunchTypes, -) +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.photoshop import get_launch_script_path diff --git a/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py b/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py index 25b22f53a4..73e8c3683c 100644 --- a/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py +++ b/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py @@ -19,7 +19,7 @@ class ImageFromSequenceLoader(photoshop.PhotoshopLoader): This loader will be triggered multiple times, but selected name will match only to proper path. - Loader doesnt do containerization as there is currently no data model + Loader doesn't do containerization as there is currently no data model of 'frame of rendered files' (only rendered sequence), update would be difficult. """ diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/closePS.py b/client/ayon_core/hosts/photoshop/plugins/publish/closePS.py index 6f86d98580..68c3b5b249 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/closePS.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/closePS.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- """Close PS after publish. 
For Webpublishing only.""" -import os - import pyblish.api from ayon_core.hosts.photoshop import api as photoshop diff --git a/client/ayon_core/hosts/resolve/api/lib.py b/client/ayon_core/hosts/resolve/api/lib.py index a60f3cd4ec..b9ad81c79d 100644 --- a/client/ayon_core/hosts/resolve/api/lib.py +++ b/client/ayon_core/hosts/resolve/api/lib.py @@ -925,7 +925,7 @@ def get_reformated_path(path, padded=False, first=False): path (str): path url or simple file name Returns: - type: string with reformated path + type: string with reformatted path Example: get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr diff --git a/client/ayon_core/hosts/resolve/api/menu.py b/client/ayon_core/hosts/resolve/api/menu.py index dd8573acc0..fc2c15ad6d 100644 --- a/client/ayon_core/hosts/resolve/api/menu.py +++ b/client/ayon_core/hosts/resolve/api/menu.py @@ -48,6 +48,7 @@ class AYONMenu(QtWidgets.QWidget): QtCore.Qt.Window | QtCore.Qt.CustomizeWindowHint | QtCore.Qt.WindowTitleHint + | QtCore.Qt.WindowMinimizeButtonHint | QtCore.Qt.WindowCloseButtonHint | QtCore.Qt.WindowStaysOnTopHint ) diff --git a/client/ayon_core/hosts/resolve/api/plugin.py b/client/ayon_core/hosts/resolve/api/plugin.py index d4c2d919a2..0b339cdf7c 100644 --- a/client/ayon_core/hosts/resolve/api/plugin.py +++ b/client/ayon_core/hosts/resolve/api/plugin.py @@ -875,14 +875,14 @@ class PublishClip: def _convert_to_entity(self, key): """ Converting input key to key with type. """ # convert to entity type - entity_type = self.types.get(key) + folder_type = self.types.get(key) - assert entity_type, "Missing entity type for `{}`".format( + assert folder_type, "Missing folder type for `{}`".format( key ) return { - "entity_type": entity_type, + "folder_type": folder_type, "entity_name": self.hierarchy_data[key]["value"].format( **self.timeline_item_default_data ) diff --git a/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py b/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py index d82651289c..cf9953bfe9 100644 --- a/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py +++ b/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class PreLaunchResolveLastWorkfile(PreLaunchHook): diff --git a/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py b/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py index c14fd75b2f..f45e28d5ab 100644 --- a/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py +++ b/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py @@ -1,7 +1,7 @@ import os from pathlib import Path import platform -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.resolve.utils import setup diff --git a/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py b/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py index ab16053450..300564f7cc 100644 --- a/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py +++ b/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py @@ -1,6 +1,6 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes import ayon_core.hosts.resolve diff --git a/client/ayon_core/hosts/resolve/otio/utils.py b/client/ayon_core/hosts/resolve/otio/utils.py index 7d8089e055..c03305ff23 100644 --- 
a/client/ayon_core/hosts/resolve/otio/utils.py +++ b/client/ayon_core/hosts/resolve/otio/utils.py @@ -25,7 +25,7 @@ def get_reformated_path(path, padded=True, first=False): path (str): path url or simple file name Returns: - type: string with reformated path + type: string with reformatted path Example: get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr diff --git a/client/ayon_core/hosts/resolve/plugins/create/create_shot_clip.py b/client/ayon_core/hosts/resolve/plugins/create/create_shot_clip.py index 3a2a0345ea..cbc03da3b6 100644 --- a/client/ayon_core/hosts/resolve/plugins/create/create_shot_clip.py +++ b/client/ayon_core/hosts/resolve/plugins/create/create_shot_clip.py @@ -166,7 +166,7 @@ class CreateShotClip(plugin.Creator): "type": "QCheckBox", "label": "Source resolution", "target": "tag", - "toolTip": "Is resloution taken from timeline or source?", # noqa + "toolTip": "Is resolution taken from timeline or source?", # noqa "order": 4}, } }, @@ -207,7 +207,7 @@ class CreateShotClip(plugin.Creator): presets = None def process(self): - # get key pares from presets and match it on ui inputs + # get key pairs from presets and match it on ui inputs for k, v in self.gui_inputs.items(): if v["type"] in ("dict", "section"): # nested dictionary (only one level allowed diff --git a/client/ayon_core/hosts/resolve/plugins/publish/precollect_instances.py b/client/ayon_core/hosts/resolve/plugins/publish/precollect_instances.py index 72ecd3669d..caa79c85c0 100644 --- a/client/ayon_core/hosts/resolve/plugins/publish/precollect_instances.py +++ b/client/ayon_core/hosts/resolve/plugins/publish/precollect_instances.py @@ -64,6 +64,11 @@ class PrecollectInstances(pyblish.api.ContextPlugin): }) folder_path = tag_data["folder_path"] + # Backward compatibility fix of 'entity_type' > 'folder_type' + if "parents" in data: + for parent in data["parents"]: + if "entity_type" in parent: + parent["folder_type"] = parent.pop("entity_type") # TODO: remove backward compatibility product_name = tag_data.get("productName") diff --git a/client/ayon_core/hosts/substancepainter/api/pipeline.py b/client/ayon_core/hosts/substancepainter/api/pipeline.py index c75cc3135a..23d629533c 100644 --- a/client/ayon_core/hosts/substancepainter/api/pipeline.py +++ b/client/ayon_core/hosts/substancepainter/api/pipeline.py @@ -12,17 +12,14 @@ import substance_painter.project import pyblish.api from ayon_core.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost -from ayon_core.settings import ( - get_current_project_settings, - get_project_settings, -) +from ayon_core.settings import get_current_project_settings from ayon_core.pipeline.template_data import get_template_data_with_names from ayon_core.pipeline import ( register_creator_plugin_path, register_loader_plugin_path, AVALON_CONTAINER_ID, - Anatomy + Anatomy, ) from ayon_core.lib import ( StringTemplate, diff --git a/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py b/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py index f2254c0907..d940d7b05c 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py +++ b/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py @@ -18,7 +18,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): """Load mesh for project""" product_types = {"*"} - representations = ["abc", "fbx", "obj", "gltf"] + representations = ["abc", "fbx", "obj", "gltf", "usd", "usda", "usdc"] label = "Load mesh" order = -10 diff --git a/client/ayon_core/hosts/traypublisher/api/editorial.py 
b/client/ayon_core/hosts/traypublisher/api/editorial.py index 8dedec7398..c71dae336c 100644 --- a/client/ayon_core/hosts/traypublisher/api/editorial.py +++ b/client/ayon_core/hosts/traypublisher/api/editorial.py @@ -186,14 +186,15 @@ class ShotMetadataSolver: # in case first parent is project then start parents from start if ( _index == 0 - and parent_token_type == "project" + and parent_token_type.lower() == "project" ): project_parent = parents[0] parents = [project_parent] continue parents.append({ - "entity_type": parent_token_type, + "entity_type": "folder", + "folder_type": parent_token_type.lower(), "entity_name": parent_name }) @@ -264,7 +265,8 @@ class ShotMetadataSolver: }] for entity in folders_hierarchy: output.append({ - "entity_type": entity["folderType"], + "entity_type": "folder", + "folder_type": entity["folderType"], "entity_name": entity["name"] }) return output diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py index a9ee343dfb..843729786c 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py @@ -675,7 +675,7 @@ or updating already created. Publishing will create OTIO file. base_instance_data = { "shotName": shot_name, "variant": variant_name, - "task": "", + "task": None, "newAssetPublishing": True, "trackStartFrame": track_start_frame, "timelineOffset": timeline_offset, diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_shot_instances.py b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_shot_instances.py index edcbb27cb3..5a2f5cbc20 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_shot_instances.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_shot_instances.py @@ -154,8 +154,9 @@ class CollectShotInstance(pyblish.api.InstancePlugin): handle_end = int(instance.data["handleEnd"]) in_info = { - "entity_type": "Shot", - "custom_attributes": { + "entity_type": "folder", + "folder_type": "Shot", + "attributes": { "handleStart": handle_start, "handleEnd": handle_end, "frameStart": instance.data["frameStart"], @@ -174,13 +175,13 @@ class CollectShotInstance(pyblish.api.InstancePlugin): for parent in reversed(parents): parent_name = parent["entity_name"] - next_dict = { - parent_name: { - "entity_type": parent["entity_type"], - "childs": actual - } + parent_info = { + "entity_type": parent["entity_type"], + "children": actual, } - actual = next_dict + if parent_info["entity_type"] == "folder": + parent_info["folder_type"] = parent["folder_type"] + actual = {parent_name: parent_info} final_context = self._update_dict(final_context, actual) diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py index e5bf034d00..4f11571efe 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py @@ -20,7 +20,7 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, optional = True # published data might be sequence (.mov, .mp4) in that counting files - # doesnt make sense + # doesn't make sense check_extensions = ["exr", "dpx", "jpg", "jpeg", "png", "tiff", "tga", "gif", "svg"] skip_timelines_check = [] # skip for specific task names (regex) diff --git 
a/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py b/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py index 25e324c5cc..691b81e089 100644 --- a/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py +++ b/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py @@ -1,5 +1,5 @@ from ayon_core.lib import get_ayon_launcher_args -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class TvpaintPrelaunchHook(PreLaunchHook): diff --git a/client/ayon_core/hosts/tvpaint/plugins/create/create_render.py b/client/ayon_core/hosts/tvpaint/plugins/create/create_render.py index 8d91afc74e..dc9c2466e0 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/create/create_render.py +++ b/client/ayon_core/hosts/tvpaint/plugins/create/create_render.py @@ -599,7 +599,7 @@ class CreateRenderPass(TVPaintCreator): if filtered_layers: self.log.info(( "Changing group of " - f"{','.join([l['name'] for l in filtered_layers])}" + f"{','.join([layer['name'] for layer in filtered_layers])}" f" to {group_id}" )) george_lines = [ @@ -760,7 +760,9 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): grg_lines: list[str] = [] for group_id, group_name in new_group_name_by_id.items(): group: dict[str, Any] = groups_by_id[group_id] - grg_line: str = "tv_layercolor \"setcolor\" {} {} {} {} {}".format( + grg_line: str = ( + "tv_layercolor \"setcolor\" {} {} {} {} {} \"{}\"" + ).format( group["clip_id"], group_id, group["red"], diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py b/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py index ab30e3dc10..fe5e148b7b 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -25,8 +25,9 @@ from ayon_core.hosts.tvpaint.lib import ( ) -class ExtractSequence(pyblish.api.Extractor): +class ExtractSequence(pyblish.api.InstancePlugin): label = "Extract Sequence" + order = pyblish.api.ExtractorOrder hosts = ["tvpaint"] families = ["review", "render"] diff --git a/client/ayon_core/hosts/unreal/api/tools_ui.py b/client/ayon_core/hosts/unreal/api/tools_ui.py index 084da9a0f0..efae5bb702 100644 --- a/client/ayon_core/hosts/unreal/api/tools_ui.py +++ b/client/ayon_core/hosts/unreal/api/tools_ui.py @@ -125,7 +125,7 @@ class WindowCache: @classmethod def _before_show(cls): - """Create QApplication if does not exists yet.""" + """Create QApplication if does not exist yet.""" if not cls._first_show: return diff --git a/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py b/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py index 54ffba3a63..e38591f65d 100644 --- a/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py +++ b/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py @@ -9,7 +9,7 @@ from pathlib import Path from qtpy import QtCore from ayon_core import resources -from ayon_core.lib.applications import ( +from ayon_applications import ( PreLaunchHook, ApplicationLaunchFailed, LaunchTypes, diff --git a/client/ayon_core/hosts/unreal/lib.py b/client/ayon_core/hosts/unreal/lib.py index fe9e239ed5..37122b2096 100644 --- a/client/ayon_core/hosts/unreal/lib.py +++ b/client/ayon_core/hosts/unreal/lib.py @@ -216,10 +216,8 @@ def create_unreal_project(project_name: str, since 3.16.0 """ - env = env or os.environ preset = get_project_settings(project_name)["unreal"]["project_setup"] - ue_id = ".".join(ue_version.split(".")[:2]) # get unreal 
engine identifier # ------------------------------------------------------------------------- # FIXME (antirotor): As of 4.26 this is problem with UE4 built from @@ -238,10 +236,12 @@ def create_unreal_project(project_name: str, project_file = pr_dir / f"{unreal_project_name}.uproject" print("--- Generating a new project ...") - commandlet_cmd = [f'{ue_editor_exe.as_posix()}', - f'{cmdlet_project.as_posix()}', - f'-run=AyonGenerateProject', - f'{project_file.resolve().as_posix()}'] + commandlet_cmd = [ + ue_editor_exe.as_posix(), + cmdlet_project.as_posix(), + "-run=AyonGenerateProject", + project_file.resolve().as_posix() + ] if dev_mode or preset["dev_mode"]: commandlet_cmd.append('-GenerateCode') @@ -268,7 +268,7 @@ def create_unreal_project(project_name: str, pf.seek(0) json.dump(pf_json, pf, indent=4) pf.truncate() - print(f'--- Engine ID has been written into the project file') + print("--- Engine ID has been written into the project file") if dev_mode or preset["dev_mode"]: u_build_tool = get_path_to_ubt(engine_path, ue_version) @@ -282,17 +282,25 @@ def create_unreal_project(project_name: str, # we need to test this out arch = "Mac" - command1 = [u_build_tool.as_posix(), "-projectfiles", - f"-project={project_file}", "-progress"] + command1 = [ + u_build_tool.as_posix(), + "-projectfiles", + f"-project={project_file}", + "-progress" + ] subprocess.run(command1) - command2 = [u_build_tool.as_posix(), - f"-ModuleWithSuffix={unreal_project_name},3555", arch, - "Development", "-TargetType=Editor", - f'-Project={project_file}', - f'{project_file}', - "-IgnoreJunk"] + command2 = [ + u_build_tool.as_posix(), + f"-ModuleWithSuffix={unreal_project_name},3555", + arch, + "Development", + "-TargetType=Editor", + f"-Project={project_file}", + project_file, + "-IgnoreJunk" + ] subprocess.run(command2) diff --git a/client/ayon_core/hosts/unreal/plugins/create/create_render.py b/client/ayon_core/hosts/unreal/plugins/create/create_render.py index cbec84c543..5a96d9809c 100644 --- a/client/ayon_core/hosts/unreal/plugins/create/create_render.py +++ b/client/ayon_core/hosts/unreal/plugins/create/create_render.py @@ -50,7 +50,7 @@ class CreateRender(UnrealAssetCreator): # If the option to create a new level sequence is selected, # create a new level sequence and a master level. - root = f"/Game/Ayon/Sequences" + root = "/Game/Ayon/Sequences" # Create a new folder for the sequence in root sequence_dir_name = create_folder(root, product_name) @@ -166,7 +166,7 @@ class CreateRender(UnrealAssetCreator): master_lvl = levels[0].get_asset().get_path_name() except IndexError: raise RuntimeError( - f"Could not find the hierarchy for the selected sequence.") + "Could not find the hierarchy for the selected sequence.") # If the selected asset is the master sequence, we get its data # and then we create the instance for the master sequence. 
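The `create_unreal_project` hunks above rework the UnrealBuildTool invocations so each argument is a plain list element handed to `subprocess.run`, instead of pre-quoted f-strings. A minimal sketch of that pattern, using hypothetical `ubt_path` and `project_file` values rather than the ones the real function resolves:

```
import subprocess
from pathlib import Path

# Hypothetical paths, for illustration only.
ubt_path = Path("C:/UE_5.3/Engine/Binaries/DotNET/UnrealBuildTool/UnrealBuildTool.exe")
project_file = Path("D:/projects/MyProject/MyProject.uproject")

# With a list, subprocess quotes each argument itself, so paths with
# spaces need no manual quoting inside the strings.
generate_cmd = [
    ubt_path.as_posix(),
    "-projectfiles",
    f"-project={project_file}",
    "-progress",
]
subprocess.run(generate_cmd)
```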
diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_animation.py b/client/ayon_core/hosts/unreal/plugins/load/load_animation.py index 0f51ac39e0..59b9f66b78 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_animation.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_animation.py @@ -8,7 +8,7 @@ from unreal import EditorAssetLibrary from unreal import MovieSceneSkeletalAnimationTrack from unreal import MovieSceneSkeletalAnimationSection -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.pipeline import ( get_representation_path, AYON_CONTAINER_ID @@ -53,7 +53,7 @@ class AnimationFBXLoader(plugin.Loader): if not actor: return None - folder_entity = get_current_project_folder(fields=["attrib.fps"]) + folder_entity = get_current_folder_entity(fields=["attrib.fps"]) task.set_editor_property('filename', path) task.set_editor_property('destination_path', asset_dir) @@ -256,7 +256,7 @@ class AnimationFBXLoader(plugin.Loader): repre_entity = context["representation"] folder_name = container["asset_name"] source_path = get_representation_path(repre_entity) - folder_entity = get_current_project_folder(fields=["attrib.fps"]) + folder_entity = get_current_folder_entity(fields=["attrib.fps"]) destination_path = container["namespace"] task = unreal.AssetImportTask() diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_layout.py b/client/ayon_core/hosts/unreal/plugins/load/load_layout.py index 6c01925453..b0f09ee8b0 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_layout.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_layout.py @@ -25,7 +25,7 @@ from ayon_core.pipeline import ( AYON_CONTAINER_ID, get_current_project_name, ) -from ayon_core.pipeline.context_tools import get_current_project_folder +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.settings import get_current_project_settings from ayon_core.hosts.unreal.api import plugin from ayon_core.hosts.unreal.api.pipeline import ( @@ -169,7 +169,7 @@ class LayoutLoader(plugin.Loader): anim_path = f"{asset_dir}/animations/{anim_file_name}" - folder_entity = get_current_project_folder() + folder_entity = get_current_folder_entity() # Import animation task = unreal.AssetImportTask() task.options = unreal.FbxImportUI() diff --git a/client/ayon_core/hosts/unreal/plugins/publish/collect_render_instances.py b/client/ayon_core/hosts/unreal/plugins/publish/collect_render_instances.py index ea53f221ea..ce2a03155b 100644 --- a/client/ayon_core/hosts/unreal/plugins/publish/collect_render_instances.py +++ b/client/ayon_core/hosts/unreal/plugins/publish/collect_render_instances.py @@ -1,12 +1,11 @@ -import os from pathlib import Path import unreal +import pyblish.api from ayon_core.pipeline import get_current_project_name from ayon_core.pipeline import Anatomy from ayon_core.hosts.unreal.api import pipeline -import pyblish.api class CollectRenderInstances(pyblish.api.InstancePlugin): diff --git a/client/ayon_core/lib/__init__.py b/client/ayon_core/lib/__init__.py index 2ee7eecfe3..408262ca42 100644 --- a/client/ayon_core/lib/__init__.py +++ b/client/ayon_core/lib/__init__.py @@ -120,22 +120,6 @@ from .transcoding import ( get_rescaled_command_arguments, ) -from .applications import ( - ApplicationLaunchFailed, - ApplictionExecutableNotFound, - ApplicationNotFound, - ApplicationManager, - - PreLaunchHook, - PostLaunchHook, - - EnvironmentPrepData, - 
prepare_app_environments, - prepare_context_environments, - get_app_environments_for_context, - apply_project_environments_value -) - from .plugin_tools import ( prepare_template_data, source_hash, @@ -231,18 +215,6 @@ __all__ = [ "convert_ffprobe_fps_to_float", "get_rescaled_command_arguments", - "ApplicationLaunchFailed", - "ApplictionExecutableNotFound", - "ApplicationNotFound", - "ApplicationManager", - "PreLaunchHook", - "PostLaunchHook", - "EnvironmentPrepData", - "prepare_app_environments", - "prepare_context_environments", - "get_app_environments_for_context", - "apply_project_environments_value", - "compile_list_of_regexes", "filter_profiles", diff --git a/client/ayon_core/lib/applications.py b/client/ayon_core/lib/applications.py deleted file mode 100644 index 2db32cbfaa..0000000000 --- a/client/ayon_core/lib/applications.py +++ /dev/null @@ -1,1893 +0,0 @@ -import os -import sys -import copy -import json -import tempfile -import platform -import collections -import inspect -import subprocess -from abc import ABCMeta, abstractmethod - -import six - -from ayon_core import AYON_CORE_ROOT -from ayon_core.settings import get_project_settings, get_studio_settings -from .log import Logger -from .profiles_filtering import filter_profiles -from .local_settings import get_ayon_username - -from .python_module_tools import ( - modules_from_path, - classes_from_module -) -from .execute import ( - find_executable, - get_linux_launcher_args -) - -_logger = None - -PLATFORM_NAMES = {"windows", "linux", "darwin"} -DEFAULT_ENV_SUBGROUP = "standard" -CUSTOM_LAUNCH_APP_GROUPS = { - "djvview" -} - - -class LaunchTypes: - """Launch types are filters for pre/post-launch hooks. - - Please use these variables in case they'll change values. - """ - - # Local launch - application is launched on local machine - local = "local" - # Farm render job - application is on farm - farm_render = "farm-render" - # Farm publish job - integration post-render job - farm_publish = "farm-publish" - # Remote launch - application is launched on remote machine from which - # can be started publishing - remote = "remote" - # Automated launch - application is launched with automated publishing - automated = "automated" - - -def parse_environments(env_data, env_group=None, platform_name=None): - """Parse environment values from settings byt group and platform. - - Data may contain up to 2 hierarchical levels of dictionaries. At the end - of the last level must be string or list. List is joined using platform - specific joiner (';' for windows and ':' for linux and mac). - - Hierarchical levels can contain keys for subgroups and platform name. - Platform specific values must be always last level of dictionary. Platform - names are "windows" (MS Windows), "linux" (any linux distribution) and - "darwin" (any MacOS distribution). - - Subgroups are helpers added mainly for standard and on farm usage. Farm - may require different environments for e.g. licence related values or - plugins. Default subgroup is "standard". 
- - Examples: - ``` - { - # Unchanged value - "ENV_KEY1": "value", - # Empty values are kept (unset environment variable) - "ENV_KEY2": "", - - # Join list values with ':' or ';' - "ENV_KEY3": ["value1", "value2"], - - # Environment groups - "ENV_KEY4": { - "standard": "DEMO_SERVER_URL", - "farm": "LICENCE_SERVER_URL" - }, - - # Platform specific (and only for windows and mac) - "ENV_KEY5": { - "windows": "windows value", - "darwin": ["value 1", "value 2"] - }, - - # Environment groups and platform combination - "ENV_KEY6": { - "farm": "FARM_VALUE", - "standard": { - "windows": ["value1", "value2"], - "linux": "value1", - "darwin": "" - } - } - } - ``` - """ - output = {} - if not env_data: - return output - - if not env_group: - env_group = DEFAULT_ENV_SUBGROUP - - if not platform_name: - platform_name = platform.system().lower() - - for key, value in env_data.items(): - if isinstance(value, dict): - # Look if any key is platform key - # - expect that represents environment group if does not contain - # platform keys - if not PLATFORM_NAMES.intersection(set(value.keys())): - # Skip the key if group is not available - if env_group not in value: - continue - value = value[env_group] - - # Check again if value is dictionary - # - this time there should be only platform keys - if isinstance(value, dict): - value = value.get(platform_name) - - # Check if value is list and join it's values - # QUESTION Should empty values be skipped? - if isinstance(value, (list, tuple)): - value = os.pathsep.join(value) - - # Set key to output if value is string - if isinstance(value, six.string_types): - output[key] = value - return output - - -def get_logger(): - """Global lib.applications logger getter.""" - global _logger - if _logger is None: - _logger = Logger.get_logger(__name__) - return _logger - - -class ApplicationNotFound(Exception): - """Application was not found in ApplicationManager by name.""" - - def __init__(self, app_name): - self.app_name = app_name - super(ApplicationNotFound, self).__init__( - "Application \"{}\" was not found.".format(app_name) - ) - - -class ApplictionExecutableNotFound(Exception): - """Defined executable paths are not available on the machine.""" - - def __init__(self, application): - self.application = application - details = None - if not application.executables: - msg = ( - "Executable paths for application \"{}\"({}) are not set." - ) - else: - msg = ( - "Defined executable paths for application \"{}\"({})" - " are not available on this machine." - ) - details = "Defined paths:" - for executable in application.executables: - details += "\n- " + executable.executable_path - - self.msg = msg.format(application.full_label, application.full_name) - self.details = details - - exc_mgs = str(self.msg) - if details: - # Is good idea to pass new line symbol to exception message? - exc_mgs += "\n" + details - self.exc_msg = exc_mgs - super(ApplictionExecutableNotFound, self).__init__(exc_mgs) - - -class ApplicationLaunchFailed(Exception): - """Application launch failed due to known reason. - - Message should be self explanatory as traceback won't be shown. - """ - pass - - -class ApplicationGroup: - """Hold information about application group. - - Application group wraps different versions(variants) of application. - e.g. "maya" is group and "maya_2020" is variant. - - Group hold `host_name` which is implementation name used in AYON. Also - holds `enabled` if whole app group is enabled or `icon` for application - icon path in resources. 
- - Group has also `environment` which hold same environments for all variants. - - Args: - name (str): Groups' name. - data (dict): Group defying data loaded from settings. - manager (ApplicationManager): Manager that created the group. - """ - - def __init__(self, name, data, manager): - self.name = name - self.manager = manager - self._data = data - - self.enabled = data["enabled"] - self.label = data["label"] or None - self.icon = data["icon"] or None - env = {} - try: - env = json.loads(data["environment"]) - except Exception: - pass - self._environment = env - - host_name = data["host_name"] or None - self.is_host = host_name is not None - self.host_name = host_name - - settings_variants = data["variants"] - variants = {} - for variant_data in settings_variants: - app_variant = Application(variant_data, self) - variants[app_variant.name] = app_variant - - self.variants = variants - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.name) - - def __iter__(self): - for variant in self.variants.values(): - yield variant - - @property - def environment(self): - return copy.deepcopy(self._environment) - - -class Application: - """Hold information about application. - - Object by itself does nothing special. - - Args: - data (dict): Data for the version containing information about - executables, variant label or if is enabled. - Only required key is `executables`. - group (ApplicationGroup): App group object that created the application - and under which application belongs. - - """ - def __init__(self, data, group): - self._data = data - name = data["name"] - label = data["label"] or name - enabled = False - if group.enabled: - enabled = data.get("enabled", True) - - if group.label: - full_label = " ".join((group.label, label)) - else: - full_label = label - env = {} - try: - env = json.loads(data["environment"]) - except Exception: - pass - - arguments = data["arguments"] - if isinstance(arguments, dict): - arguments = arguments.get(platform.system().lower()) - - if not arguments: - arguments = [] - - _executables = data["executables"].get(platform.system().lower(), []) - executables = [ - ApplicationExecutable(executable) - for executable in _executables - ] - - self.group = group - - self.name = name - self.label = label - self.enabled = enabled - self.use_python_2 = data.get("use_python_2", False) - - self.full_name = "/".join((group.name, name)) - self.full_label = full_label - self.arguments = arguments - self.executables = executables - self._environment = env - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.full_name) - - @property - def environment(self): - return copy.deepcopy(self._environment) - - @property - def manager(self): - return self.group.manager - - @property - def host_name(self): - return self.group.host_name - - @property - def icon(self): - return self.group.icon - - @property - def is_host(self): - return self.group.is_host - - def find_executable(self): - """Try to find existing executable for application. - - Returns (str): Path to executable from `executables` or None if any - exists. - """ - for executable in self.executables: - if executable.exists(): - return executable - return None - - def launch(self, *args, **kwargs): - """Launch the application. - - For this purpose is used manager's launch method to keep logic at one - place. - - Arguments must match with manager's launch method. That's why *args - **kwargs are used. 
- - Returns: - subprocess.Popen: Return executed process as Popen object. - """ - return self.manager.launch(self.full_name, *args, **kwargs) - - -class ApplicationManager: - """Load applications and tools and store them by their full name. - - Args: - studio_settings (dict): Preloaded studio settings. When passed manager - will always use these values. Gives ability to create manager - using different settings. - """ - - def __init__(self, studio_settings=None): - self.log = Logger.get_logger(self.__class__.__name__) - - self.app_groups = {} - self.applications = {} - self.tool_groups = {} - self.tools = {} - - self._studio_settings = studio_settings - - self.refresh() - - def set_studio_settings(self, studio_settings): - """Ability to change init system settings. - - This will trigger refresh of manager. - """ - self._studio_settings = studio_settings - - self.refresh() - - def refresh(self): - """Refresh applications from settings.""" - self.app_groups.clear() - self.applications.clear() - self.tool_groups.clear() - self.tools.clear() - - if self._studio_settings is not None: - settings = copy.deepcopy(self._studio_settings) - else: - settings = get_studio_settings( - clear_metadata=False, exclude_locals=False - ) - - applications_addon_settings = settings["applications"] - - # Prepare known applications - app_defs = applications_addon_settings["applications"] - additional_apps = app_defs.pop("additional_apps") - for additional_app in additional_apps: - app_name = additional_app.pop("name") - if app_name in app_defs: - self.log.warning(( - "Additional application '{}' is already" - " in built-in applications." - ).format(app_name)) - app_defs[app_name] = additional_app - - for group_name, variant_defs in app_defs.items(): - group = ApplicationGroup(group_name, variant_defs, self) - self.app_groups[group_name] = group - for app in group: - self.applications[app.full_name] = app - - tools_definitions = applications_addon_settings["tool_groups"] - for tool_group_data in tools_definitions: - group = EnvironmentToolGroup(tool_group_data, self) - self.tool_groups[group.name] = group - for tool in group: - self.tools[tool.full_name] = tool - - def find_latest_available_variant_for_group(self, group_name): - group = self.app_groups.get(group_name) - if group is None or not group.enabled: - return None - - output = None - for _, variant in reversed(sorted(group.variants.items())): - executable = variant.find_executable() - if executable: - output = variant - break - return output - - def create_launch_context(self, app_name, **data): - """Prepare launch context for application. - - Args: - app_name (str): Name of application that should be launched. - **data (Any): Any additional data. Data may be used during - - Returns: - ApplicationLaunchContext: Launch context for application. - - Raises: - ApplicationNotFound: Application was not found by entered name. - """ - - app = self.applications.get(app_name) - if not app: - raise ApplicationNotFound(app_name) - - executable = app.find_executable() - - return ApplicationLaunchContext( - app, executable, **data - ) - - def launch_with_context(self, launch_context): - """Launch application using existing launch context. - - Args: - launch_context (ApplicationLaunchContext): Prepared launch - context. - """ - - if not launch_context.executable: - raise ApplictionExecutableNotFound(launch_context.application) - return launch_context.launch() - - def launch(self, app_name, **data): - """Launch procedure. 
- - For host application it's expected to contain "project_name", - "folder_path" and "task_name". - - Args: - app_name (str): Name of application that should be launched. - **data (dict): Any additional data. Data may be used during - preparation to store objects usable in multiple places. - - Raises: - ApplicationNotFound: Application was not found by entered - argument `app_name`. - ApplictionExecutableNotFound: Executables in application definition - were not found on this machine. - ApplicationLaunchFailed: Something important for application launch - failed. Exception should contain explanation message, - traceback should not be needed. - """ - - context = self.create_launch_context(app_name, **data) - return self.launch_with_context(context) - - - -class EnvironmentToolGroup: - """Hold information about environment tool group. - - Environment tool group may hold different variants of same tool and set - environments that are same for all of them. - - e.g. "mtoa" may have different versions but all environments except one - are same. - - Args: - data (dict): Group information with variants. - manager (ApplicationManager): Manager that creates the group. - """ - - def __init__(self, data, manager): - name = data["name"] - label = data["label"] - - self.name = name - self.label = label - self._data = data - self.manager = manager - - environment = {} - try: - environment = json.loads(data["environment"]) - except Exception: - pass - self._environment = environment - - variants = data.get("variants") or [] - variants_by_name = {} - for variant_data in variants: - tool = EnvironmentTool(variant_data, self) - variants_by_name[tool.name] = tool - self.variants = variants_by_name - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.name) - - def __iter__(self): - for variant in self.variants.values(): - yield variant - - @property - def environment(self): - return copy.deepcopy(self._environment) - - -class EnvironmentTool: - """Hold information about application tool. - - Structure of tool information. - - Args: - variant_data (dict): Variant data with environments and - host and app variant filters. - group (EnvironmentToolGroup): Name of group which wraps tool. - """ - - def __init__(self, variant_data, group): - # Backwards compatibility 3.9.1 - 3.9.2 - # - 'variant_data' contained only environments but contain also host - # and application variant filters - name = variant_data["name"] - label = variant_data["label"] - host_names = variant_data["host_names"] - app_variants = variant_data["app_variants"] - - environment = {} - try: - environment = json.loads(variant_data["environment"]) - except Exception: - pass - - self.host_names = host_names - self.app_variants = app_variants - self.name = name - self.variant_label = label - self.label = " ".join((group.label, label)) - self.group = group - - self._environment = environment - self.full_name = "/".join((group.name, name)) - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.full_name) - - @property - def environment(self): - return copy.deepcopy(self._environment) - - def is_valid_for_app(self, app): - """Is tool valid for application. - - Args: - app (Application): Application for which are prepared environments. 
- """ - if self.app_variants and app.full_name not in self.app_variants: - return False - - if self.host_names and app.host_name not in self.host_names: - return False - return True - - -class ApplicationExecutable: - """Representation of executable loaded from settings.""" - - def __init__(self, executable): - # Try to format executable with environments - try: - executable = executable.format(**os.environ) - except Exception: - pass - - # On MacOS check if exists path to executable when ends with `.app` - # - it is common that path will lead to "/Applications/Blender" but - # real path is "/Applications/Blender.app" - if platform.system().lower() == "darwin": - executable = self.macos_executable_prep(executable) - - self.executable_path = executable - - def __str__(self): - return self.executable_path - - def __repr__(self): - return "<{}> {}".format(self.__class__.__name__, self.executable_path) - - @staticmethod - def macos_executable_prep(executable): - """Try to find full path to executable file. - - Real executable is stored in '*.app/Contents/MacOS/'. - - Having path to '*.app' gives ability to read it's plist info and - use "CFBundleExecutable" key from plist to know what is "executable." - - Plist is stored in '*.app/Contents/Info.plist'. - - This is because some '*.app' directories don't have same permissions - as real executable. - """ - # Try to find if there is `.app` file - if not os.path.exists(executable): - _executable = executable + ".app" - if os.path.exists(_executable): - executable = _executable - - # Try to find real executable if executable has `Contents` subfolder - contents_dir = os.path.join(executable, "Contents") - if os.path.exists(contents_dir): - executable_filename = None - # Load plist file and check for bundle executable - plist_filepath = os.path.join(contents_dir, "Info.plist") - if os.path.exists(plist_filepath): - import plistlib - - if hasattr(plistlib, "load"): - with open(plist_filepath, "rb") as stream: - parsed_plist = plistlib.load(stream) - else: - parsed_plist = plistlib.readPlist(plist_filepath) - executable_filename = parsed_plist.get("CFBundleExecutable") - - if executable_filename: - executable = os.path.join( - contents_dir, "MacOS", executable_filename - ) - - return executable - - def as_args(self): - return [self.executable_path] - - def _realpath(self): - """Check if path is valid executable path.""" - # Check for executable in PATH - result = find_executable(self.executable_path) - if result is not None: - return result - - # This is not 100% validation but it is better than remove ability to - # launch .bat, .sh or extentionless files - if os.path.exists(self.executable_path): - return self.executable_path - return None - - def exists(self): - if not self.executable_path: - return False - return bool(self._realpath()) - - -class UndefinedApplicationExecutable(ApplicationExecutable): - """Some applications do not require executable path from settings. - - In that case this class is used to "fake" existing executable. - """ - def __init__(self): - pass - - def __str__(self): - return self.__class__.__name__ - - def __repr__(self): - return "<{}>".format(self.__class__.__name__) - - def as_args(self): - return [] - - def exists(self): - return True - - -@six.add_metaclass(ABCMeta) -class LaunchHook: - """Abstract base class of launch hook.""" - # Order of prelaunch hook, will be executed as last if set to None. - order = None - # List of host implementations, skipped if empty. 
- hosts = set() - # Set of application groups - app_groups = set() - # Set of specific application names - app_names = set() - # Set of platform availability - platforms = set() - # Set of launch types for which is available - # - if empty then is available for all launch types - # - by default has 'local' which is most common reason for launc hooks - launch_types = {LaunchTypes.local} - - def __init__(self, launch_context): - """Constructor of launch hook. - - Always should be called - """ - self.log = Logger.get_logger(self.__class__.__name__) - - self.launch_context = launch_context - - is_valid = self.class_validation(launch_context) - if is_valid: - is_valid = self.validate() - - self.is_valid = is_valid - - @classmethod - def class_validation(cls, launch_context): - """Validation of class attributes by launch context. - - Args: - launch_context (ApplicationLaunchContext): Context of launching - application. - - Returns: - bool: Is launch hook valid for the context by class attributes. - """ - if cls.platforms: - low_platforms = tuple( - _platform.lower() - for _platform in cls.platforms - ) - if platform.system().lower() not in low_platforms: - return False - - if cls.hosts: - if launch_context.host_name not in cls.hosts: - return False - - if cls.app_groups: - if launch_context.app_group.name not in cls.app_groups: - return False - - if cls.app_names: - if launch_context.app_name not in cls.app_names: - return False - - if cls.launch_types: - if launch_context.launch_type not in cls.launch_types: - return False - - return True - - @property - def data(self): - return self.launch_context.data - - @property - def application(self): - return getattr(self.launch_context, "application", None) - - @property - def manager(self): - return getattr(self.application, "manager", None) - - @property - def host_name(self): - return getattr(self.application, "host_name", None) - - @property - def app_group(self): - return getattr(self.application, "group", None) - - @property - def app_name(self): - return getattr(self.application, "full_name", None) - - @property - def addons_manager(self): - return getattr(self.launch_context, "addons_manager", None) - - @property - def modules_manager(self): - """ - Deprecated: - Use 'addons_wrapper' instead. - """ - return self.addons_manager - - def validate(self): - """Optional validation of launch hook on initialization. - - Returns: - bool: Hook is valid (True) or invalid (False). - """ - # QUESTION Not sure if this method has any usable potential. - # - maybe result can be based on settings - return True - - @abstractmethod - def execute(self, *args, **kwargs): - """Abstract execute method where logic of hook is.""" - pass - - -class PreLaunchHook(LaunchHook): - """Abstract class of prelaunch hook. - - This launch hook will be processed before application is launched. - - If any exception will happen during processing the application won't be - launched. - """ - - -class PostLaunchHook(LaunchHook): - """Abstract class of postlaunch hook. - - This launch hook will be processed after application is launched. - - Nothing will happen if any exception will happen during processing. And - processing of other postlaunch hooks won't stop either. - """ - - -class ApplicationLaunchContext: - """Context of launching application. - - Main purpose of context is to prepare launch arguments and keyword - arguments for new process. Most important part of keyword arguments - preparations are environment variables. 
- - During the whole process is possible to use `data` attribute to store - object usable in multiple places. - - Launch arguments are strings in list. It is possible to "chain" argument - when order of them matters. That is possible to do with adding list where - order is right and should not change. - NOTE: This is recommendation, not requirement. - e.g.: `["nuke.exe", "--NukeX"]` -> In this case any part of process may - insert argument between `nuke.exe` and `--NukeX`. To keep them together - it is better to wrap them in another list: `[["nuke.exe", "--NukeX"]]`. - - Notes: - It is possible to use launch context only to prepare environment - variables. In that case `executable` may be None and can be used - 'run_prelaunch_hooks' method to run prelaunch hooks which prepare - them. - - Args: - application (Application): Application definition. - executable (ApplicationExecutable): Object with path to executable. - env_group (Optional[str]): Environment variable group. If not set - 'DEFAULT_ENV_SUBGROUP' is used. - launch_type (Optional[str]): Launch type. If not set 'local' is used. - **data (dict): Any additional data. Data may be used during - preparation to store objects usable in multiple places. - """ - - def __init__( - self, - application, - executable, - env_group=None, - launch_type=None, - **data - ): - from ayon_core.addon import AddonsManager - - # Application object - self.application = application - - self.addons_manager = AddonsManager() - - # Logger - logger_name = "{}-{}".format(self.__class__.__name__, - self.application.full_name) - self.log = Logger.get_logger(logger_name) - - self.executable = executable - - if launch_type is None: - launch_type = LaunchTypes.local - self.launch_type = launch_type - - if env_group is None: - env_group = DEFAULT_ENV_SUBGROUP - - self.env_group = env_group - - self.data = dict(data) - - launch_args = [] - if executable is not None: - launch_args = executable.as_args() - # subprocess.Popen launch arguments (first argument in constructor) - self.launch_args = launch_args - self.launch_args.extend(application.arguments) - if self.data.get("app_args"): - self.launch_args.extend(self.data.pop("app_args")) - - # Handle launch environemtns - src_env = self.data.pop("env", None) - if src_env is not None and not isinstance(src_env, dict): - self.log.warning(( - "Passed `env` kwarg has invalid type: {}. Expected: `dict`." - " Using `os.environ` instead." - ).format(str(type(src_env)))) - src_env = None - - if src_env is None: - src_env = os.environ - - ignored_env = {"QT_API", } - env = { - key: str(value) - for key, value in src_env.items() - if key not in ignored_env - } - # subprocess.Popen keyword arguments - self.kwargs = {"env": env} - - if platform.system().lower() == "windows": - # Detach new process from currently running process on Windows - flags = ( - subprocess.CREATE_NEW_PROCESS_GROUP - | subprocess.DETACHED_PROCESS - ) - self.kwargs["creationflags"] = flags - - if not sys.stdout: - self.kwargs["stdout"] = subprocess.DEVNULL - self.kwargs["stderr"] = subprocess.DEVNULL - - self.prelaunch_hooks = None - self.postlaunch_hooks = None - - self.process = None - self._prelaunch_hooks_executed = False - - @property - def env(self): - if ( - "env" not in self.kwargs - or self.kwargs["env"] is None - ): - self.kwargs["env"] = {} - return self.kwargs["env"] - - @env.setter - def env(self, value): - if not isinstance(value, dict): - raise ValueError( - "'env' attribute expect 'dict' object. 
Got: {}".format( - str(type(value)) - ) - ) - self.kwargs["env"] = value - - @property - def modules_manager(self): - """ - Deprecated: - Use 'addons_manager' instead. - - """ - return self.addons_manager - - def _collect_addons_launch_hook_paths(self): - """Helper to collect application launch hooks from addons. - - Module have to have implemented 'get_launch_hook_paths' method which - can expect application as argument or nothing. - - Returns: - List[str]: Paths to launch hook directories. - """ - - expected_types = (list, tuple, set) - - output = [] - for module in self.addons_manager.get_enabled_addons(): - # Skip module if does not have implemented 'get_launch_hook_paths' - func = getattr(module, "get_launch_hook_paths", None) - if func is None: - continue - - func = module.get_launch_hook_paths - if hasattr(inspect, "signature"): - sig = inspect.signature(func) - expect_args = len(sig.parameters) > 0 - else: - expect_args = len(inspect.getargspec(func)[0]) > 0 - - # Pass application argument if method expect it. - try: - if expect_args: - hook_paths = func(self.application) - else: - hook_paths = func() - except Exception: - self.log.warning( - "Failed to call 'get_launch_hook_paths'", - exc_info=True - ) - continue - - if not hook_paths: - continue - - # Convert string to list - if isinstance(hook_paths, six.string_types): - hook_paths = [hook_paths] - - # Skip invalid types - if not isinstance(hook_paths, expected_types): - self.log.warning(( - "Result of `get_launch_hook_paths`" - " has invalid type {}. Expected {}" - ).format(type(hook_paths), expected_types)) - continue - - output.extend(hook_paths) - return output - - def paths_to_launch_hooks(self): - """Directory paths where to look for launch hooks.""" - # This method has potential to be part of application manager (maybe). 
- paths = [] - - # TODO load additional studio paths from settings - global_hooks_dir = os.path.join(AYON_CORE_ROOT, "hooks") - - hooks_dirs = [ - global_hooks_dir - ] - if self.host_name: - # If host requires launch hooks and is module then launch hooks - # should be collected using 'collect_launch_hook_paths' - # - module have to implement 'get_launch_hook_paths' - host_module = self.addons_manager.get_host_addon(self.host_name) - if not host_module: - hooks_dirs.append(os.path.join( - AYON_CORE_ROOT, "hosts", self.host_name, "hooks" - )) - - for path in hooks_dirs: - if ( - os.path.exists(path) - and os.path.isdir(path) - and path not in paths - ): - paths.append(path) - - # Load modules paths - paths.extend(self._collect_addons_launch_hook_paths()) - - return paths - - def discover_launch_hooks(self, force=False): - """Load and prepare launch hooks.""" - if ( - self.prelaunch_hooks is not None - or self.postlaunch_hooks is not None - ): - if not force: - self.log.info("Launch hooks were already discovered.") - return - - self.prelaunch_hooks.clear() - self.postlaunch_hooks.clear() - - self.log.debug("Discovery of launch hooks started.") - - paths = self.paths_to_launch_hooks() - self.log.debug("Paths searched for launch hooks:\n{}".format( - "\n".join("- {}".format(path) for path in paths) - )) - - all_classes = { - "pre": [], - "post": [] - } - for path in paths: - if not os.path.exists(path): - self.log.info( - "Path to launch hooks does not exist: \"{}\"".format(path) - ) - continue - - modules, _crashed = modules_from_path(path) - for _filepath, module in modules: - all_classes["pre"].extend( - classes_from_module(PreLaunchHook, module) - ) - all_classes["post"].extend( - classes_from_module(PostLaunchHook, module) - ) - - for launch_type, classes in all_classes.items(): - hooks_with_order = [] - hooks_without_order = [] - for klass in classes: - try: - hook = klass(self) - if not hook.is_valid: - self.log.debug( - "Skipped hook invalid for current launch context: " - "{}".format(klass.__name__) - ) - continue - - if inspect.isabstract(hook): - self.log.debug("Skipped abstract hook: {}".format( - klass.__name__ - )) - continue - - # Separate hooks by pre/post class - if hook.order is None: - hooks_without_order.append(hook) - else: - hooks_with_order.append(hook) - - except Exception: - self.log.warning( - "Initialization of hook failed: " - "{}".format(klass.__name__), - exc_info=True - ) - - # Sort hooks with order by order - ordered_hooks = list(sorted( - hooks_with_order, key=lambda obj: obj.order - )) - # Extend ordered hooks with hooks without defined order - ordered_hooks.extend(hooks_without_order) - - if launch_type == "pre": - self.prelaunch_hooks = ordered_hooks - else: - self.postlaunch_hooks = ordered_hooks - - self.log.debug("Found {} prelaunch and {} postlaunch hooks.".format( - len(self.prelaunch_hooks), len(self.postlaunch_hooks) - )) - - @property - def app_name(self): - return self.application.name - - @property - def host_name(self): - return self.application.host_name - - @property - def app_group(self): - return self.application.group - - @property - def manager(self): - return self.application.manager - - def _run_process(self): - # Windows and MacOS have easier process start - low_platform = platform.system().lower() - if low_platform in ("windows", "darwin"): - return subprocess.Popen(self.launch_args, **self.kwargs) - - # Linux uses mid process - # - it is possible that the mid process executable is not - # available for this version of AYON in that case 
use standard - # launch - launch_args = get_linux_launcher_args() - if launch_args is None: - return subprocess.Popen(self.launch_args, **self.kwargs) - - # Prepare data that will be passed to midprocess - # - store arguments to a json and pass path to json as last argument - # - pass environments to set - app_env = self.kwargs.pop("env", {}) - json_data = { - "args": self.launch_args, - "env": app_env - } - if app_env: - # Filter environments of subprocess - self.kwargs["env"] = { - key: value - for key, value in os.environ.items() - if key in app_env - } - - # Create temp file - json_temp = tempfile.NamedTemporaryFile( - mode="w", prefix="op_app_args", suffix=".json", delete=False - ) - json_temp.close() - json_temp_filpath = json_temp.name - with open(json_temp_filpath, "w") as stream: - json.dump(json_data, stream) - - launch_args.append(json_temp_filpath) - - # Create mid-process which will launch application - process = subprocess.Popen(launch_args, **self.kwargs) - # Wait until the process finishes - # - This is important! The process would stay in "open" state. - process.wait() - # Remove the temp file - os.remove(json_temp_filpath) - # Return process which is already terminated - return process - - def run_prelaunch_hooks(self): - """Run prelaunch hooks. - - This method will be executed only once, any future calls will skip - the processing. - """ - - if self._prelaunch_hooks_executed: - self.log.warning("Prelaunch hooks were already executed.") - return - # Discover launch hooks - self.discover_launch_hooks() - - # Execute prelaunch hooks - for prelaunch_hook in self.prelaunch_hooks: - self.log.debug("Executing prelaunch hook: {}".format( - str(prelaunch_hook.__class__.__name__) - )) - prelaunch_hook.execute() - self._prelaunch_hooks_executed = True - - def launch(self): - """Collect data for new process and then create it. - - This method must not be executed more than once. - - Returns: - subprocess.Popen: Created process as Popen object. - """ - if self.process is not None: - self.log.warning("Application was already launched.") - return - - if not self._prelaunch_hooks_executed: - self.run_prelaunch_hooks() - - self.log.debug("All prelaunch hook executed. Starting new process.") - - # Prepare subprocess args - args_len_str = "" - if isinstance(self.launch_args, str): - args = self.launch_args - else: - args = self.clear_launch_args(self.launch_args) - args_len_str = " ({})".format(len(args)) - self.log.info( - "Launching \"{}\" with args{}: {}".format( - self.application.full_name, args_len_str, args - ) - ) - self.launch_args = args - - # Run process - self.process = self._run_process() - - # Process post launch hooks - for postlaunch_hook in self.postlaunch_hooks: - self.log.debug("Executing postlaunch hook: {}".format( - str(postlaunch_hook.__class__.__name__) - )) - - # TODO how to handle errors? - # - store to variable to let them accessible? - try: - postlaunch_hook.execute() - - except Exception: - self.log.warning( - "After launch procedures were not successful.", - exc_info=True - ) - - self.log.debug("Launch of {} finished.".format( - self.application.full_name - )) - - return self.process - - @staticmethod - def clear_launch_args(args): - """Collect launch arguments to final order. - - Launch argument should be list that may contain another lists this - function will upack inner lists and keep ordering. 
- - ``` - # source - [ [ arg1, [ arg2, arg3 ] ], arg4, [arg5, arg6]] - # result - [ arg1, arg2, arg3, arg4, arg5, arg6] - - Args: - args (list): Source arguments in list may contain inner lists. - - Return: - list: Unpacked arguments. - """ - if isinstance(args, str): - return args - all_cleared = False - while not all_cleared: - all_cleared = True - new_args = [] - for arg in args: - if isinstance(arg, (list, tuple, set)): - all_cleared = False - for _arg in arg: - new_args.append(_arg) - else: - new_args.append(arg) - args = new_args - - return args - - -class MissingRequiredKey(KeyError): - pass - - -class EnvironmentPrepData(dict): - """Helper dictionary for storin temp data during environment prep. - - Args: - data (dict): Data must contain required keys. - """ - required_keys = ( - "project_entity", "folder_entity", "task_entity", "app", "anatomy" - ) - - def __init__(self, data): - for key in self.required_keys: - if key not in data: - raise MissingRequiredKey(key) - - if not data.get("log"): - data["log"] = get_logger() - - if data.get("env") is None: - data["env"] = os.environ.copy() - - project_name = data["project_entity"]["name"] - if "project_settings" not in data: - data["project_settings"] = get_project_settings(project_name) - - super(EnvironmentPrepData, self).__init__(data) - - -def get_app_environments_for_context( - project_name, - folder_path, - task_name, - app_name, - env_group=None, - launch_type=None, - env=None, - addons_manager=None -): - """Prepare environment variables by context. - Args: - project_name (str): Name of project. - folder_path (str): Folder path. - task_name (str): Name of task. - app_name (str): Name of application that is launched and can be found - by ApplicationManager. - env_group (Optional[str]): Name of environment group. If not passed - default group is used. - launch_type (Optional[str]): Type for which prelaunch hooks are - executed. - env (Optional[dict[str, str]]): Initial environment variables. - `os.environ` is used when not passed. - addons_manager (Optional[AddonsManager]): Initialized modules - manager. - - Returns: - dict: Environments for passed context and application. 
- """ - - # Prepare app object which can be obtained only from ApplicationManager - app_manager = ApplicationManager() - context = app_manager.create_launch_context( - app_name, - project_name=project_name, - folder_path=folder_path, - task_name=task_name, - env_group=env_group, - launch_type=launch_type, - env=env, - addons_manager=addons_manager, - modules_manager=addons_manager, - ) - context.run_prelaunch_hooks() - return context.env - - -def _merge_env(env, current_env): - """Modified function(merge) from acre module.""" - import acre - - result = current_env.copy() - for key, value in env.items(): - # Keep missing keys by not filling `missing` kwarg - value = acre.lib.partial_format(value, data=current_env) - result[key] = value - return result - - -def _add_python_version_paths(app, env, logger, addons_manager): - """Add vendor packages specific for a Python version.""" - - for addon in addons_manager.get_enabled_addons(): - addon.modify_application_launch_arguments(app, env) - - # Skip adding if host name is not set - if not app.host_name: - return - - # Add Python 2/3 modules - python_vendor_dir = os.path.join( - AYON_CORE_ROOT, - "vendor", - "python" - ) - if app.use_python_2: - pythonpath = os.path.join(python_vendor_dir, "python_2") - else: - pythonpath = os.path.join(python_vendor_dir, "python_3") - - if not os.path.exists(pythonpath): - return - - logger.debug("Adding Python version specific paths to PYTHONPATH") - python_paths = [pythonpath] - - # Load PYTHONPATH from current launch context - python_path = env.get("PYTHONPATH") - if python_path: - python_paths.append(python_path) - - # Set new PYTHONPATH to launch context environments - env["PYTHONPATH"] = os.pathsep.join(python_paths) - - -def prepare_app_environments( - data, env_group=None, implementation_envs=True, addons_manager=None -): - """Modify launch environments based on launched app and context. - - Args: - data (EnvironmentPrepData): Dictionary where result and intermediate - result will be stored. - """ - import acre - - app = data["app"] - log = data["log"] - source_env = data["env"].copy() - - if addons_manager is None: - from ayon_core.addon import AddonsManager - - addons_manager = AddonsManager() - - _add_python_version_paths(app, source_env, log, addons_manager) - - # Use environments from local settings - filtered_local_envs = {} - # NOTE Overrides for environment variables are not implemented in AYON. 
- # project_settings = data["project_settings"] - # whitelist_envs = project_settings["general"].get("local_env_white_list") - # if whitelist_envs: - # local_settings = get_local_settings() - # local_envs = local_settings.get("environments") or {} - # filtered_local_envs = { - # key: value - # for key, value in local_envs.items() - # if key in whitelist_envs - # } - - # Apply local environment variables for already existing values - for key, value in filtered_local_envs.items(): - if key in source_env: - source_env[key] = value - - # `app_and_tool_labels` has debug purpose - app_and_tool_labels = [app.full_name] - # Environments for application - environments = [ - app.group.environment, - app.environment - ] - - folder_entity = data.get("folder_entity") - # Add tools environments - groups_by_name = {} - tool_by_group_name = collections.defaultdict(dict) - if folder_entity: - # Make sure each tool group can be added only once - for key in folder_entity["attrib"].get("tools") or []: - tool = app.manager.tools.get(key) - if not tool or not tool.is_valid_for_app(app): - continue - groups_by_name[tool.group.name] = tool.group - tool_by_group_name[tool.group.name][tool.name] = tool - - for group_name in sorted(groups_by_name.keys()): - group = groups_by_name[group_name] - environments.append(group.environment) - for tool_name in sorted(tool_by_group_name[group_name].keys()): - tool = tool_by_group_name[group_name][tool_name] - environments.append(tool.environment) - app_and_tool_labels.append(tool.full_name) - - log.debug( - "Will add environments for apps and tools: {}".format( - ", ".join(app_and_tool_labels) - ) - ) - - env_values = {} - for _env_values in environments: - if not _env_values: - continue - - # Choose right platform - tool_env = parse_environments(_env_values, env_group) - - # Apply local environment variables - # - must happen between all values because they may be used during - # merge - for key, value in filtered_local_envs.items(): - if key in tool_env: - tool_env[key] = value - - # Merge dictionaries - env_values = _merge_env(tool_env, env_values) - - merged_env = _merge_env(env_values, source_env) - - loaded_env = acre.compute(merged_env, cleanup=False) - - final_env = None - # Add host specific environments - if app.host_name and implementation_envs: - host_addon = addons_manager.get_host_addon(app.host_name) - if not host_addon: - module = __import__("ayon_core.hosts", fromlist=[app.host_name]) - host_module = getattr(module, app.host_name, None) - add_implementation_envs = None - if host_addon: - add_implementation_envs = getattr( - host_addon, "add_implementation_envs", None - ) - if add_implementation_envs: - # Function may only modify passed dict without returning value - final_env = add_implementation_envs(loaded_env, app) - - if final_env is None: - final_env = loaded_env - - keys_to_remove = set(source_env.keys()) - set(final_env.keys()) - - # Update env - data["env"].update(final_env) - for key in keys_to_remove: - data["env"].pop(key, None) - - -def apply_project_environments_value( - project_name, env, project_settings=None, env_group=None -): - """Apply project specific environments on passed environments. - - The environments are applied on passed `env` argument value so it is not - required to apply changes back. - - Args: - project_name (str): Name of project for which environments should be - received. - env (dict): Environment values on which project specific environments - will be applied. 
- project_settings (dict): Project settings for passed project name. - Optional if project settings are already prepared. - - Returns: - dict: Passed env values with applied project environments. - - Raises: - KeyError: If project settings do not contain keys for project specific - environments. - """ - import acre - - if project_settings is None: - project_settings = get_project_settings(project_name) - - env_value = project_settings["core"]["project_environments"] - if env_value: - env_value = json.loads(env_value) - parsed_value = parse_environments(env_value, env_group) - env.update(acre.compute( - _merge_env(parsed_value, env), - cleanup=False - )) - return env - - -def prepare_context_environments(data, env_group=None, addons_manager=None): - """Modify launch environments with context data for launched host. - - Args: - data (EnvironmentPrepData): Dictionary where result and intermediate - result will be stored. - """ - - from ayon_core.pipeline.template_data import get_template_data - - # Context environments - log = data["log"] - - project_entity = data["project_entity"] - folder_entity = data["folder_entity"] - task_entity = data["task_entity"] - if not project_entity: - log.info( - "Skipping context environments preparation." - " Launch context does not contain required data." - ) - return - - # Load project specific environments - project_name = project_entity["name"] - project_settings = get_project_settings(project_name) - data["project_settings"] = project_settings - - app = data["app"] - context_env = { - "AYON_PROJECT_NAME": project_entity["name"], - "AYON_APP_NAME": app.full_name - } - if folder_entity: - folder_path = folder_entity["path"] - context_env["AYON_FOLDER_PATH"] = folder_path - - if task_entity: - context_env["AYON_TASK_NAME"] = task_entity["name"] - - log.debug( - "Context environments set:\n{}".format( - json.dumps(context_env, indent=4) - ) - ) - data["env"].update(context_env) - - # Apply project specific environments on current env value - # - apply them once the context environments are set - apply_project_environments_value( - project_name, data["env"], project_settings, env_group - ) - - if not app.is_host: - return - - data["env"]["AYON_HOST_NAME"] = app.host_name - - if not folder_entity or not task_entity: - # QUESTION replace with log.info and skip workfile discovery? - # - technically it should be possible to launch host without context - raise ApplicationLaunchFailed( - "Host launch require folder and task context." 
- ) - - workdir_data = get_template_data( - project_entity, - folder_entity, - task_entity, - app.host_name, - project_settings - ) - data["workdir_data"] = workdir_data - - anatomy = data["anatomy"] - - task_type = workdir_data["task"]["type"] - # Temp solution how to pass task type to `_prepare_last_workfile` - data["task_type"] = task_type - - try: - from ayon_core.pipeline.workfile import get_workdir_with_workdir_data - - workdir = get_workdir_with_workdir_data( - workdir_data, - anatomy.project_name, - anatomy, - project_settings=project_settings - ) - - except Exception as exc: - raise ApplicationLaunchFailed( - "Error in anatomy.format: {}".format(str(exc)) - ) - - if not os.path.exists(workdir): - log.debug( - "Creating workdir folder: \"{}\"".format(workdir) - ) - try: - os.makedirs(workdir) - except Exception as exc: - raise ApplicationLaunchFailed( - "Couldn't create workdir because: {}".format(str(exc)) - ) - - data["env"]["AYON_WORKDIR"] = workdir - - _prepare_last_workfile(data, workdir, addons_manager) - - -def _prepare_last_workfile(data, workdir, addons_manager): - """last workfile workflow preparation. - - Function check if should care about last workfile workflow and tries - to find the last workfile. Both information are stored to `data` and - environments. - - Last workfile is filled always (with version 1) even if any workfile - exists yet. - - Args: - data (EnvironmentPrepData): Dictionary where result and intermediate - result will be stored. - workdir (str): Path to folder where workfiles should be stored. - """ - - from ayon_core.addon import AddonsManager - from ayon_core.pipeline import HOST_WORKFILE_EXTENSIONS - from ayon_core.pipeline.workfile import ( - should_use_last_workfile_on_launch, - should_open_workfiles_tool_on_launch, - ) - - if not addons_manager: - addons_manager = AddonsManager() - - log = data["log"] - - _workdir_data = data.get("workdir_data") - if not _workdir_data: - log.info( - "Skipping last workfile preparation." - " Key `workdir_data` not filled." 
- ) - return - - app = data["app"] - workdir_data = copy.deepcopy(_workdir_data) - project_name = data["project_name"] - task_name = data["task_name"] - task_type = data["task_type"] - - start_last_workfile = data.get("start_last_workfile") - if start_last_workfile is None: - start_last_workfile = should_use_last_workfile_on_launch( - project_name, app.host_name, task_name, task_type - ) - else: - log.info("Opening of last workfile was disabled by user") - - data["start_last_workfile"] = start_last_workfile - - workfile_startup = should_open_workfiles_tool_on_launch( - project_name, app.host_name, task_name, task_type - ) - data["workfile_startup"] = workfile_startup - - # Store boolean as "0"(False) or "1"(True) - data["env"]["AVALON_OPEN_LAST_WORKFILE"] = ( - str(int(bool(start_last_workfile))) - ) - data["env"]["AYON_WORKFILE_TOOL_ON_START"] = ( - str(int(bool(workfile_startup))) - ) - - _sub_msg = "" if start_last_workfile else " not" - log.debug( - "Last workfile should{} be opened on start.".format(_sub_msg) - ) - - # Last workfile path - last_workfile_path = data.get("last_workfile_path") or "" - if not last_workfile_path: - host_addon = addons_manager.get_host_addon(app.host_name) - if host_addon: - extensions = host_addon.get_workfile_extensions() - else: - extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) - - if extensions: - from ayon_core.pipeline.workfile import ( - get_workfile_template_key, - get_last_workfile - ) - - anatomy = data["anatomy"] - project_settings = data["project_settings"] - task_type = workdir_data["task"]["type"] - template_key = get_workfile_template_key( - project_name, - task_type, - app.host_name, - project_settings=project_settings - ) - # Find last workfile - file_template = anatomy.get_template_item( - "work", template_key, "file" - ).template - - workdir_data.update({ - "version": 1, - "user": get_ayon_username(), - "ext": extensions[0] - }) - - last_workfile_path = get_last_workfile( - workdir, file_template, workdir_data, extensions, True - ) - - if os.path.exists(last_workfile_path): - log.debug(( - "Workfiles for launch context does not exists" - " yet but path will be set." - )) - log.debug( - "Setting last workfile path: {}".format(last_workfile_path) - ) - - data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path - data["last_workfile_path"] = last_workfile_path diff --git a/client/ayon_core/lib/local_settings.py b/client/ayon_core/lib/local_settings.py index 9eba3d1ed1..fd255c997f 100644 --- a/client/ayon_core/lib/local_settings.py +++ b/client/ayon_core/lib/local_settings.py @@ -524,7 +524,7 @@ def get_ayon_appdirs(*args): def get_local_site_id(): """Get local site identifier. - Identifier is created if does not exists yet. + Identifier is created if does not exist yet. """ # used for background syncing site_id = os.environ.get("AYON_SITE_ID") diff --git a/client/ayon_core/lib/path_templates.py b/client/ayon_core/lib/path_templates.py index 09d11ea1de..a766dbd9c1 100644 --- a/client/ayon_core/lib/path_templates.py +++ b/client/ayon_core/lib/path_templates.py @@ -102,7 +102,7 @@ class StringTemplate(object): """ Figure out with whole formatting. Separate advanced keys (*Like '{project[name]}') from string which must - be formatted separatelly in case of missing or incomplete keys in data. + be formatted separately in case of missing or incomplete keys in data. Args: data (dict): Containing keys to be filled into template. 
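With `ayon_core/lib/applications.py` deleted above and its re-exports removed from `ayon_core.lib`, launch-hook code now imports from the standalone `ayon_applications` addon, as the TVPaint and Unreal hook hunks earlier in this patch show. A minimal sketch of that migration for a host pre-launch hook; the hook body itself is illustrative only:

```
# Before this patch:
# from ayon_core.lib.applications import PreLaunchHook, LaunchTypes

# After this patch:
from ayon_applications import PreLaunchHook, LaunchTypes


class ExamplePrelaunchHook(PreLaunchHook):
    """Illustrative hook; real hooks live in each host's hooks directory."""

    launch_types = {LaunchTypes.local}

    def execute(self):
        # The launch context API is unchanged by the move.
        self.log.debug(
            "Launching '%s'", self.launch_context.application.full_name
        )
```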
diff --git a/client/ayon_core/lib/terminal.py b/client/ayon_core/lib/terminal.py index a22f2358aa..10fcc79a27 100644 --- a/client/ayon_core/lib/terminal.py +++ b/client/ayon_core/lib/terminal.py @@ -1,15 +1,5 @@ # -*- coding: utf-8 -*- """Package helping with colorizing and formatting terminal output.""" -# :: -# //. ... .. ///. //. -# ///\\\ \\\ \\ ///\\\ /// -# /// \\ \\\ \\ /// \\ /// // -# \\\ // \\\ // \\\ // \\\// ./ -# \\\// \\\// \\\// \\\' // -# \\\ \\\ \\\ \\\// -# ''' ''' ''' ''' -# ..---===[[ PyP3 Setup ]]===---... -# import re import time import threading diff --git a/client/ayon_core/lib/transcoding.py b/client/ayon_core/lib/transcoding.py index 08e0bc9237..4d778c2091 100644 --- a/client/ayon_core/lib/transcoding.py +++ b/client/ayon_core/lib/transcoding.py @@ -45,7 +45,7 @@ ARRAY_TYPE_REGEX = re.compile(r"^(int|float|string)\[\d+\]$") IMAGE_EXTENSIONS = { ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", - ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", + ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dng", ".dpx", ".ecw", ".exr", ".fits", ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer", ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2", ".jng", ".jpeg", ".jpeg-ls", ".jpeg-hdr", ".2000", ".jpg", diff --git a/client/ayon_core/modules/base.py b/client/ayon_core/modules/base.py index 8a78edf961..3f2a7d4ea5 100644 --- a/client/ayon_core/modules/base.py +++ b/client/ayon_core/modules/base.py @@ -1,3 +1,5 @@ +# Backwards compatibility support +# - TODO should be removed before release 1.0.0 from ayon_core.addon import ( AYONAddon, AddonsManager, @@ -12,3 +14,16 @@ from ayon_core.addon.base import ( ModulesManager = AddonsManager TrayModulesManager = TrayAddonsManager load_modules = load_addons + + +__all__ = ( + "AYONAddon", + "AddonsManager", + "TrayAddonsManager", + "load_addons", + "OpenPypeModule", + "OpenPypeAddOn", + "ModulesManager", + "TrayModulesManager", + "load_modules", +) diff --git a/client/ayon_core/modules/clockify/clockify_api.py b/client/ayon_core/modules/clockify/clockify_api.py index f8c9c537ee..2e1d8f008f 100644 --- a/client/ayon_core/modules/clockify/clockify_api.py +++ b/client/ayon_core/modules/clockify/clockify_api.py @@ -1,6 +1,4 @@ import os -import re -import time import json import datetime import requests diff --git a/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py b/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py index 61c5eac2f5..8381c7d73e 100644 --- a/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py +++ b/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py @@ -11,19 +11,17 @@ class ClockifyStart(LauncherAction): order = 500 clockify_api = ClockifyAPI() - def is_compatible(self, session): + def is_compatible(self, selection): """Return whether the action is compatible with the session""" - if "AYON_TASK_NAME" in session: - return True - return False + return selection.is_task_selected - def process(self, session, **kwargs): + def process(self, selection, **kwargs): self.clockify_api.set_api() user_id = self.clockify_api.user_id workspace_id = self.clockify_api.workspace_id - project_name = session["AYON_PROJECT_NAME"] - folder_path = session["AYON_FOLDER_PATH"] - task_name = session["AYON_TASK_NAME"] + project_name = selection.project_name + folder_path = selection.folder_path + task_name = selection.task_name description = "/".join([folder_path.lstrip("/"), task_name]) # fetch folder entity diff --git 
a/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py b/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py index 72187c6d28..5388f47c98 100644 --- a/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py +++ b/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py @@ -19,15 +19,18 @@ class ClockifySync(LauncherAction): order = 500 clockify_api = ClockifyAPI() - def is_compatible(self, session): + def is_compatible(self, selection): """Check if there's some projects to sync""" + if selection.is_project_selected: + return True + try: next(ayon_api.get_projects()) return True except StopIteration: return False - def process(self, session, **kwargs): + def process(self, selection, **kwargs): self.clockify_api.set_api() workspace_id = self.clockify_api.workspace_id user_id = self.clockify_api.user_id @@ -37,10 +40,9 @@ class ClockifySync(LauncherAction): raise ClockifyPermissionsCheckFailed( "Current CLockify user is missing permissions for this action!" ) - project_name = session.get("AYON_PROJECT_NAME") or "" - if project_name.strip(): - projects_to_sync = [ayon_api.get_project(project_name)] + if selection.is_project_selected: + projects_to_sync = [selection.project_entity] else: projects_to_sync = ayon_api.get_projects() diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index a284464009..675346105c 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -80,6 +80,8 @@ class AfterEffectsSubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py index ae19e63a37..ab342c1a9d 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py @@ -102,6 +102,8 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py index cf124c0bcc..bfb65708e6 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py @@ -225,6 +225,8 @@ class FusionSubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py index beb8afc3a3..d52b16b27d 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -273,6 +273,8 @@ class HarmonySubmitDeadline( 
"FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py index 1abefa515a..cba05f6948 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py @@ -106,12 +106,14 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", "AYON_WORKDIR", "AYON_APP_NAME", - "IS_TEST" + "IS_TEST", ] environment = { diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py index 0e871eb90e..0300b12104 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -207,6 +207,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", @@ -651,7 +653,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, return job_info, attr.asdict(plugin_info) def _get_arnold_render_payload(self, data): - from maya import cmds # Job Info job_info = copy.deepcopy(self.job_info) job_info.Name = self._job_info_label("Render") @@ -856,10 +857,10 @@ def _format_tiles( """ # Math used requires integers for correct output - as such # we ensure our inputs are correct. 
- assert type(tiles_x) is int, "tiles_x must be an integer" - assert type(tiles_y) is int, "tiles_y must be an integer" - assert type(width) is int, "width must be an integer" - assert type(height) is int, "height must be an integer" + assert isinstance(tiles_x, int), "tiles_x must be an integer" + assert isinstance(tiles_y, int), "tiles_y must be an integer" + assert isinstance(width, int), "width must be an integer" + assert isinstance(height, int), "height must be an integer" out = {"JobInfo": {}, "PluginInfo": {}} cfg = OrderedDict() diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py index ac01af901c..d70cb75bf3 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -376,6 +376,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, keys = [ "PYTHONPATH", "PATH", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", @@ -388,7 +390,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, "TOOL_ENV", "FOUNDRY_LICENSE", "OPENPYPE_SG_USER", - "AYON_BUNDLE_NAME", ] # add allowed keys from preset if any diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py index 50bd414587..910b2e46db 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py @@ -133,6 +133,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, "AYON_RENDER_JOB": "0", "AYON_REMOTE_PUBLISH": "0", "AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"], + "AYON_DEFAULT_SETTINGS_VARIANT": ( + os.environ["AYON_DEFAULT_SETTINGS_VARIANT"] + ), } # add environments from self.environ_keys diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py index 84bac6d017..af5839d0cf 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py @@ -210,6 +210,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, "AYON_RENDER_JOB": "0", "AYON_REMOTE_PUBLISH": "0", "AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"], + "AYON_DEFAULT_SETTINGS_VARIANT": ( + os.environ["AYON_DEFAULT_SETTINGS_VARIANT"] + ), } # add environments from self.environ_keys diff --git a/client/ayon_core/modules/deadline/repository/custom/plugins/Ayon/Ayon.py b/client/ayon_core/modules/deadline/repository/custom/plugins/Ayon/Ayon.py index de0a2c6d7a..bb7f932013 100644 --- a/client/ayon_core/modules/deadline/repository/custom/plugins/Ayon/Ayon.py +++ b/client/ayon_core/modules/deadline/repository/custom/plugins/Ayon/Ayon.py @@ -7,7 +7,6 @@ from Deadline.Plugins import PluginType, DeadlinePlugin from Deadline.Scripting import ( StringUtils, FileUtils, - DirectoryUtils, RepositoryUtils ) diff --git a/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 1565b2c496..ac04407f5b 100644 --- a/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ 
b/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -12,9 +12,8 @@ from Deadline.Scripting import ( RepositoryUtils, FileUtils, DirectoryUtils, - ProcessUtils, ) -__version__ = "1.0.1" +__version__ = "1.1.0" VERSION_REGEX = re.compile( r"(?P0|[1-9]\d*)" r"\.(?P0|[1-9]\d*)" @@ -464,19 +463,13 @@ def inject_ayon_environment(deadlinePlugin): export_url = os.path.join(tempfile.gettempdir(), temp_file_name) print(">>> Temporary path: {}".format(export_url)) - args = [ - "--headless", - "extractenvironments", - export_url - ] - add_kwargs = { "envgroup": "farm", } # Support backwards compatible keys for key, env_keys in ( ("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]), - ("asset", ["AYON_FOLDER_PATH", "AVALON_ASSET"]), + ("folder", ["AYON_FOLDER_PATH", "AVALON_ASSET"]), ("task", ["AYON_TASK_NAME", "AVALON_TASK"]), ("app", ["AYON_APP_NAME", "AVALON_APP_NAME"]), ): @@ -487,18 +480,37 @@ def inject_ayon_environment(deadlinePlugin): break add_kwargs[key] = value - if job.GetJobEnvironmentKeyValue("IS_TEST"): - args.append("--automatic-tests") - - if all(add_kwargs.values()): - for key, value in add_kwargs.items(): - args.extend(["--{}".format(key), value]) - else: + if not all(add_kwargs.values()): raise RuntimeError(( "Missing required env vars: AYON_PROJECT_NAME," " AYON_FOLDER_PATH, AYON_TASK_NAME, AYON_APP_NAME" )) + # Use applications addon arguments + # TODO validate if applications addon should be used + args = [ + "--headless", + "addon", + "applications", + "extractenvironments", + export_url + ] + # Backwards compatibility for older versions + legacy_args = [ + "--headless", + "extractenvironments", + export_url + ] + if job.GetJobEnvironmentKeyValue("IS_TEST"): + args.append("--automatic-tests") + + for key, value in add_kwargs.items(): + args.extend(["--{}".format(key), value]) + # Legacy arguments expect '--asset' instead of '--folder' + if key == "folder": + key = "asset" + legacy_args.extend(["--{}".format(key), value]) + environment = { "AYON_SERVER_URL": ayon_server_url, "AYON_API_KEY": ayon_api_key, @@ -517,9 +529,18 @@ def inject_ayon_environment(deadlinePlugin): ) if process_exitcode != 0: - raise RuntimeError( - "Failed to run Ayon process to extract environments." + print( + "Failed to run AYON process to extract environments. Trying" + " to use legacy arguments." ) + legacy_args_str = subprocess.list2cmdline(legacy_args) + process_exitcode = deadlinePlugin.RunProcess( + exe, legacy_args_str, os.path.dirname(exe), -1 + ) + if process_exitcode != 0: + raise RuntimeError( + "Failed to run AYON process to extract environments." 
+ ) print(">>> Loading file ...") with open(export_url) as fp: diff --git a/client/ayon_core/modules/job_queue/addon.py b/client/ayon_core/modules/job_queue/addon.py index 32d06d0040..0fa54eb2f0 100644 --- a/client/ayon_core/modules/job_queue/addon.py +++ b/client/ayon_core/modules/job_queue/addon.py @@ -168,7 +168,7 @@ class JobQueueAddon(AYONAddon): @classmethod def start_worker(cls, app_name, server_url=None): import requests - from ayon_core.lib import ApplicationManager + from ayon_applications import ApplicationManager if not server_url: server_url = cls.get_server_url_from_settings() diff --git a/client/ayon_core/modules/launcher_action.py b/client/ayon_core/modules/launcher_action.py index 1faf6ef4b1..38e88d36ca 100644 --- a/client/ayon_core/modules/launcher_action.py +++ b/client/ayon_core/modules/launcher_action.py @@ -37,20 +37,6 @@ class LauncherAction(AYONAddon, ITrayAction): if path and os.path.exists(path): register_launcher_action_path(path) - paths_str = os.environ.get("AVALON_ACTIONS") or "" - if paths_str: - self.log.warning( - "WARNING: 'AVALON_ACTIONS' is deprecated. Support of this" - " environment variable will be removed in future versions." - " Please consider using 'OpenPypeModule' to define custom" - " action paths. Planned version to drop the support" - " is 3.17.2 or 3.18.0 ." - ) - - for path in paths_str.split(os.pathsep): - if path and os.path.exists(path): - register_launcher_action_path(path) - def on_action_trigger(self): """Implementation for ITrayAction interface. diff --git a/client/ayon_core/modules/royalrender/lib.py b/client/ayon_core/modules/royalrender/lib.py index d552e7fb19..82bc96e759 100644 --- a/client/ayon_core/modules/royalrender/lib.py +++ b/client/ayon_core/modules/royalrender/lib.py @@ -2,7 +2,6 @@ """Submitting render job to RoyalRender.""" import os import json -import platform import re import tempfile import uuid @@ -309,31 +308,45 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin, export_url = os.path.join(tempfile.gettempdir(), temp_file_name) print(">>> Temporary path: {}".format(export_url)) - args = [ - "--headless", - "extractenvironments", - export_url - ] - anatomy_data = instance.context.data["anatomyData"] + addons_manager = instance.context.data["ayonAddonsManager"] + applications_addon = addons_manager.get_enabled_addon("applications") + + folder_key = "folder" + if applications_addon is None: + # Use 'asset' when applications addon command is not used + folder_key = "asset" add_kwargs = { "project": anatomy_data["project"]["name"], - "asset": instance.context.data["folderPath"], + folder_key: instance.context.data["folderPath"], "task": anatomy_data["task"]["name"], "app": instance.context.data.get("appName"), "envgroup": "farm" } - if os.getenv('IS_TEST'): - args.append("--automatic-tests") - if not all(add_kwargs.values()): raise RuntimeError(( "Missing required env vars: AYON_PROJECT_NAME, AYON_FOLDER_PATH," " AYON_TASK_NAME, AYON_APP_NAME" )) + args = ["--headless"] + # Use applications addon to extract environments + # NOTE this is for backwards compatibility, the global command + # will be removed in future and only applications addon command + # should be used. 
+ if applications_addon is not None: + args.extend(["addon", "applications"]) + + args.extend([ + "extractenvironments", + export_url + ]) + + if os.getenv('IS_TEST'): + args.append("--automatic-tests") + for key, value in add_kwargs.items(): args.extend([f"--{key}", value]) self.log.debug("Executing: {}".format(" ".join(args))) diff --git a/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py b/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py index 5d177fec07..662913cadf 100644 --- a/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py +++ b/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py @@ -198,7 +198,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin, priority = self.priority or instance.data.get("priority", 50) - # rr requires absolut path or all jobs won't show up in rControl + # rr requires absolute path or all jobs won't show up in rrControl abs_metadata_path = self.anatomy.fill_root(rootless_metadata_path) # command line set in E01__OpenPype__PublishJob.cfg, here only diff --git a/client/ayon_core/modules/royalrender/plugins/publish/submit_jobs_to_royalrender.py b/client/ayon_core/modules/royalrender/plugins/publish/submit_jobs_to_royalrender.py index 54de943428..09c1dc4a54 100644 --- a/client/ayon_core/modules/royalrender/plugins/publish/submit_jobs_to_royalrender.py +++ b/client/ayon_core/modules/royalrender/plugins/publish/submit_jobs_to_royalrender.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Submit jobs to RoyalRender.""" import tempfile -import platform import pyblish.api from ayon_core.modules.royalrender.api import ( diff --git a/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py b/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py index da5d430939..b402d4034a 100644 --- a/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py +++ b/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PostLaunchHook, LaunchTypes +from ayon_applications import PostLaunchHook, LaunchTypes class PostStartTimerHook(PostLaunchHook): diff --git a/client/ayon_core/pipeline/actions.py b/client/ayon_core/pipeline/actions.py index 8e0ce7e583..eae2fc94b5 100644 --- a/client/ayon_core/pipeline/actions.py +++ b/client/ayon_core/pipeline/actions.py @@ -1,4 +1,8 @@ import logging +import warnings + +import ayon_api + from ayon_core.pipeline.plugin_discover import ( discover, register_plugin, @@ -10,6 +14,288 @@ from ayon_core.pipeline.plugin_discover import ( from .load.utils import get_representation_path_from_context +class LauncherActionSelection: + """Object helper to pass selection to actions. + + Object support backwards compatibility for 'session' from OpenPype where + environment variable keys were used to define selection. + + Args: + project_name (str): Selected project name. + folder_id (str): Selected folder id. + task_id (str): Selected task id. + folder_path (Optional[str]): Selected folder path. + task_name (Optional[str]): Selected task name. + project_entity (Optional[dict[str, Any]]): Project entity. + folder_entity (Optional[dict[str, Any]]): Folder entity. + task_entity (Optional[dict[str, Any]]): Task entity. 
+ + """ + def __init__( + self, + project_name, + folder_id, + task_id, + folder_path=None, + task_name=None, + project_entity=None, + folder_entity=None, + task_entity=None + ): + self._project_name = project_name + self._folder_id = folder_id + self._task_id = task_id + + self._folder_path = folder_path + self._task_name = task_name + + self._project_entity = project_entity + self._folder_entity = folder_entity + self._task_entity = task_entity + + def __getitem__(self, key): + warnings.warn( + ( + "Using deprecated access to selection data. Please use" + " attributes and methods" + " defined by 'LauncherActionSelection'." + ), + category=DeprecationWarning + ) + if key in {"AYON_PROJECT_NAME", "AVALON_PROJECT"}: + return self.project_name + if key in {"AYON_FOLDER_PATH", "AVALON_ASSET"}: + return self.folder_path + if key in {"AYON_TASK_NAME", "AVALON_TASK"}: + return self.task_name + raise KeyError(f"Key: {key} not found") + + def __iter__(self): + for key in self.keys(): + yield key + + def __contains__(self, key): + warnings.warn( + ( + "Using deprecated access to selection data. Please use" + " attributes and methods" + " defined by 'LauncherActionSelection'." + ), + category=DeprecationWarning + ) + # Fake missing keys check for backwards compatibility + if key in { + "AYON_PROJECT_NAME", + "AVALON_PROJECT", + }: + return self._project_name is not None + if key in { + "AYON_FOLDER_PATH", + "AVALON_ASSET", + }: + return self._folder_id is not None + if key in { + "AYON_TASK_NAME", + "AVALON_TASK", + }: + return self._task_id is not None + return False + + def get(self, key, default=None): + """ + + Deprecated: + Added for backwards compatibility with older actions. + + """ + warnings.warn( + ( + "Using deprecated access to selection data. Please use" + " attributes and methods" + " defined by 'LauncherActionSelection'." + ), + category=DeprecationWarning + ) + try: + return self[key] + except KeyError: + return default + + def items(self): + """ + + Deprecated: + Added for backwards compatibility with older actions. + + """ + for key, value in ( + ("AYON_PROJECT_NAME", self.project_name), + ("AYON_FOLDER_PATH", self.folder_path), + ("AYON_TASK_NAME", self.task_name), + ): + if value is not None: + yield (key, value) + + def keys(self): + """ + + Deprecated: + Added for backwards compatibility with older actions. + + """ + for key, _ in self.items(): + yield key + + def values(self): + """ + + Deprecated: + Added for backwards compatibility with older actions. + + """ + for _, value in self.items(): + yield value + + def get_project_name(self): + """Selected project name. + + Returns: + Union[str, None]: Selected project name. + + """ + return self._project_name + + def get_folder_id(self): + """Selected folder id. + + Returns: + Union[str, None]: Selected folder id. + + """ + return self._folder_id + + def get_folder_path(self): + """Selected folder path. + + Returns: + Union[str, None]: Selected folder path. + + """ + if self._folder_id is None: + return None + if self._folder_path is None: + self._folder_path = self.folder_entity["path"] + return self._folder_path + + def get_task_id(self): + """Selected task id. + + Returns: + Union[str, None]: Selected task id. + + """ + return self._task_id + + def get_task_name(self): + """Selected task name. + + Returns: + Union[str, None]: Selected task name. 
+ + """ + if self._task_id is None: + return None + if self._task_name is None: + self._task_name = self.task_entity["name"] + return self._task_name + + def get_project_entity(self): + """Project entity for the selection. + + Returns: + Union[dict[str, Any], None]: Project entity. + + """ + if self._project_name is None: + return None + if self._project_entity is None: + self._project_entity = ayon_api.get_project(self._project_name) + return self._project_entity + + def get_folder_entity(self): + """Folder entity for the selection. + + Returns: + Union[dict[str, Any], None]: Folder entity. + + """ + if self._project_name is None or self._folder_id is None: + return None + if self._folder_entity is None: + self._folder_entity = ayon_api.get_folder_by_id( + self._project_name, self._folder_id + ) + return self._folder_entity + + def get_task_entity(self): + """Task entity for the selection. + + Returns: + Union[dict[str, Any], None]: Task entity. + + """ + if ( + self._project_name is None + or self._task_id is None + ): + return None + if self._task_entity is None: + self._task_entity = ayon_api.get_task_by_id( + self._project_name, self._task_id + ) + return self._task_entity + + @property + def is_project_selected(self): + """Return whether a project is selected. + + Returns: + bool: Whether a project is selected. + + """ + return self._project_name is not None + + @property + def is_folder_selected(self): + """Return whether a folder is selected. + + Returns: + bool: Whether a folder is selected. + + """ + return self._folder_id is not None + + @property + def is_task_selected(self): + """Return whether a task is selected. + + Returns: + bool: Whether a task is selected. + + """ + return self._task_id is not None + + project_name = property(get_project_name) + folder_id = property(get_folder_id) + task_id = property(get_task_id) + folder_path = property(get_folder_path) + task_name = property(get_task_name) + + project_entity = property(get_project_entity) + folder_entity = property(get_folder_entity) + task_entity = property(get_task_entity) + + class LauncherAction(object): """A custom action available""" name = None @@ -21,17 +307,23 @@ class LauncherAction(object): log = logging.getLogger("LauncherAction") log.propagate = True - def is_compatible(self, session): + def is_compatible(self, selection): """Return whether the class is compatible with the Session. Args: - session (dict[str, Union[str, None]]): Session data with - AYON_PROJECT_NAME, AYON_FOLDER_PATH and AYON_TASK_NAME. - """ + selection (LauncherActionSelection): Data with selection. + """ return True - def process(self, session, **kwargs): + def process(self, selection, **kwargs): + """Process the action. + + Args: + selection (LauncherActionSelection): Data with selection. + **kwargs: Additional arguments. 
+ + """ pass diff --git a/client/ayon_core/pipeline/anatomy/anatomy.py b/client/ayon_core/pipeline/anatomy/anatomy.py index 0d250116bd..73dd215233 100644 --- a/client/ayon_core/pipeline/anatomy/anatomy.py +++ b/client/ayon_core/pipeline/anatomy/anatomy.py @@ -447,7 +447,7 @@ class CacheItem: class Anatomy(BaseAnatomy): - _sync_server_addon_cache = CacheItem() + _sitesync_addon_cache = CacheItem() _project_cache = collections.defaultdict(CacheItem) _default_site_id_cache = collections.defaultdict(CacheItem) _root_overrides_cache = collections.defaultdict( @@ -482,13 +482,13 @@ class Anatomy(BaseAnatomy): return copy.deepcopy(project_cache.data) @classmethod - def get_sync_server_addon(cls): - if cls._sync_server_addon_cache.is_outdated: + def get_sitesync_addon(cls): + if cls._sitesync_addon_cache.is_outdated: manager = AddonsManager() - cls._sync_server_addon_cache.update_data( - manager.get_enabled_addon("sync_server") + cls._sitesync_addon_cache.update_data( + manager.get_enabled_addon("sitesync") ) - return cls._sync_server_addon_cache.data + return cls._sitesync_addon_cache.data @classmethod def _get_studio_roots_overrides(cls, project_name): @@ -525,8 +525,8 @@ class Anatomy(BaseAnatomy): """ # First check if sync server is available and enabled - sync_server = cls.get_sync_server_addon() - if sync_server is None or not sync_server.enabled: + sitesync_addon = cls.get_sitesync_addon() + if sitesync_addon is None or not sitesync_addon.enabled: # QUESTION is ok to force 'studio' when site sync is not enabled? site_name = "studio" @@ -535,7 +535,7 @@ class Anatomy(BaseAnatomy): project_cache = cls._default_site_id_cache[project_name] if project_cache.is_outdated: project_cache.update_data( - sync_server.get_active_site_type(project_name) + sitesync_addon.get_active_site_type(project_name) ) site_name = project_cache.data @@ -549,7 +549,7 @@ class Anatomy(BaseAnatomy): ) else: # Ask sync server to get roots overrides - roots_overrides = sync_server.get_site_root_overrides( + roots_overrides = sitesync.get_site_root_overrides( project_name, site_name ) site_cache.update_data(roots_overrides) diff --git a/client/ayon_core/pipeline/colorspace.py b/client/ayon_core/pipeline/colorspace.py index 034c90d27b..efa3bbf968 100644 --- a/client/ayon_core/pipeline/colorspace.py +++ b/client/ayon_core/pipeline/colorspace.py @@ -23,7 +23,7 @@ log = Logger.get_logger(__name__) class CachedData: - remapping = None + remapping = {} has_compatible_ocio_package = None config_version_data = {} ocio_config_colorspaces = {} diff --git a/client/ayon_core/pipeline/context_tools.py b/client/ayon_core/pipeline/context_tools.py index 84a17be8f2..e9151bcd1f 100644 --- a/client/ayon_core/pipeline/context_tools.py +++ b/client/ayon_core/pipeline/context_tools.py @@ -97,8 +97,8 @@ def install_host(host): """Install `host` into the running Python session. Args: - host (module): A Python module containing the Avalon - avalon host-interface. + host (HostBase): A host interface object. + """ global _is_installed @@ -154,6 +154,13 @@ def install_host(host): def install_ayon_plugins(project_name=None, host_name=None): + """Install AYON core plugins and make sure the core is initialized. + + Args: + project_name (Optional[str]): Name of project to install plugins for. + host_name (Optional[str]): Name of host to install plugins for. 
+ + """ # Make sure global AYON connection has set site id and version # - this is necessary if 'install_host' is not called initialize_ayon_connection() @@ -223,6 +230,12 @@ def install_ayon_plugins(project_name=None, host_name=None): def install_openpype_plugins(project_name=None, host_name=None): + """Install AYON core plugins and make sure the core is initialized. + + Deprecated: + Use `install_ayon_plugins` instead. + + """ install_ayon_plugins(project_name, host_name) @@ -281,47 +294,6 @@ def deregister_host(): _registered_host["_"] = None -def debug_host(): - """A debug host, useful to debugging features that depend on a host""" - - host = types.ModuleType("debugHost") - - def ls(): - containers = [ - { - "representation": "ee-ft-a-uuid1", - "schema": "openpype:container-1.0", - "name": "Bruce01", - "objectName": "Bruce01_node", - "namespace": "_bruce01_", - "version": 3, - }, - { - "representation": "aa-bc-s-uuid2", - "schema": "openpype:container-1.0", - "name": "Bruce02", - "objectName": "Bruce01_node", - "namespace": "_bruce02_", - "version": 2, - } - ] - - for container in containers: - yield container - - host.__dict__.update({ - "ls": ls, - "open_file": lambda fname: None, - "save_file": lambda fname: None, - "current_file": lambda: os.path.expanduser("~/temp.txt"), - "has_unsaved_changes": lambda: False, - "work_root": lambda: os.path.expanduser("~/temp"), - "file_extensions": lambda: ["txt"], - }) - - return host - - def get_current_host_name(): """Current host name. @@ -347,7 +319,8 @@ def get_global_context(): Use 'get_current_context' to make sure you'll get current host integration context info. - Example: + Example:: + { "project_name": "Commercial", "folder_path": "Bunny", @@ -411,42 +384,67 @@ def get_current_project_entity(fields=None): return ayon_api.get_project(project_name, fields=fields) -def get_current_project_folder(folder_path=None, folder_id=None, fields=None): +def get_current_folder_entity(fields=None): """Helper function to get folder entity based on current context. This function should be called only in process where host is installed. - Folder is found out based on passed folder path or id (not both). Folder - path is not used for filtering if folder id is passed. When both - folder path and id are missing then current folder path is used. + Folder is based on current context project name and folder path. Args: - folder_path (Union[str, None]): Folder path used for filter. - folder_id (Union[str, None]): Folder id. If entered then - is used as only filter. fields (Optional[Iterable[str]]): Limit returned data of folder entity to specific keys. Returns: - Union[dict[str, Any], None]: Fodler entity or None. + Union[dict[str, Any], None]: Folder entity or None. + """ + context = get_current_context() + project_name = context["project_name"] + folder_path = context["folder_path"] - project_name = get_current_project_name() - if folder_id: - return ayon_api.get_folder_by_id( - project_name, folder_id, fields=fields - ) - - if not folder_path: - folder_path = get_current_folder_path() - # Skip if is not set even on context - if not folder_path: - return None + # Skip if is not set even on context + if not project_name or not folder_path: + return None return ayon_api.get_folder_by_path( project_name, folder_path, fields=fields ) +def get_current_task_entity(fields=None): + """Helper function to get task entity based on current context. + + This function should be called only in process where host is installed. 
+ + Task is based on current context project name, folder path + and task name. + + Args: + fields (Optional[Iterable[str]]): Limit returned data of task entity + to specific keys. + + Returns: + Union[dict[str, Any], None]: Task entity or None. + + """ + context = get_current_context() + project_name = context["project_name"] + folder_path = context["folder_path"] + task_name = context["task_name"] + + # Skip if is not set even on context + if not project_name or not folder_path or not task_name: + return None + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"id"} + ) + if not folder_entity: + return None + return ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name, fields=fields + ) + + def is_representation_from_latest(representation): """Return whether the representation is from latest version @@ -515,88 +513,13 @@ def get_current_context_template_data(settings=None): ) -def get_workdir_from_session(session=None, template_key=None): - """Template data for template fill from session keys. - - Args: - session (Union[Dict[str, str], None]): The Session to use. If not - provided use the currently active global Session. - template_key (str): Prepared template key from which workdir is - calculated. - - Returns: - str: Workdir path. - """ - - if session is not None: - project_name = session["AYON_PROJECT_NAME"] - host_name = session["AYON_HOST_NAME"] - else: - project_name = get_current_project_name() - host_name = get_current_host_name() - template_data = get_template_data_from_session(session) - - if not template_key: - task_type = template_data["task"]["type"] - template_key = get_workfile_template_key( - project_name, - task_type, - host_name, - ) - - anatomy = Anatomy(project_name) - template_obj = anatomy.get_template_item("work", template_key, "directory") - path = template_obj.format_strict(template_data) - if path: - path = os.path.normpath(path) - return path - - -def get_custom_workfile_template_from_session( - session=None, project_settings=None -): - """Filter and fill workfile template profiles by current context. - - This function cab be used only inside host where context is set. - - Args: - session (Optional[Dict[str, str]]): Session from which are taken - data. - project_settings(Optional[Dict[str, Any]]): Project settings. - - Returns: - str: Path to template or None if none of profiles match current - context. (Existence of formatted path is not validated.) - """ - - if session is not None: - project_name = session["AYON_PROJECT_NAME"] - folder_path = session["AYON_FOLDER_PATH"] - task_name = session["AYON_TASK_NAME"] - host_name = session["AYON_HOST_NAME"] - else: - context = get_current_context() - project_name = context["project_name"] - folder_path = context["folder_path"] - task_name = context["task_name"] - host_name = get_current_host_name() - - return get_custom_workfile_template_by_string_context( - project_name, - folder_path, - task_name, - host_name, - project_settings=project_settings - ) - - def get_current_context_custom_workfile_template(project_settings=None): """Filter and fill workfile template profiles by current context. - This function can be used only inside host where context is set. + This function can be used only inside host where current context is set. Args: - project_settings(Optional[Dict[str, Any]]): Project settings. 
+ project_settings (Optional[dict[str, Any]]): Project settings Returns: str: Path to template or None if none of profiles match current diff --git a/client/ayon_core/pipeline/create/README.md b/client/ayon_core/pipeline/create/README.md index bbfd1bfa0f..09d3a22222 100644 --- a/client/ayon_core/pipeline/create/README.md +++ b/client/ayon_core/pipeline/create/README.md @@ -8,7 +8,7 @@ Discovers Creator plugins to be able create new instances and convert existing i Publish plugins are loaded because they can also define attributes definitions. These are less product type specific To be able define attributes Publish plugin must inherit from `AYONPyblishPluginMixin` and must override `get_attribute_defs` class method which must return list of attribute definitions. Values of publish plugin definitions are stored per plugin name under `publish_attributes`. Also can override `convert_attribute_values` class method which gives ability to modify values on instance before are used in CreatedInstance. Method `convert_attribute_values` can be also used without `get_attribute_defs` to modify values when changing compatibility (remove metadata from instance because are irrelevant). -Possible attribute definitions can be found in `openpype/pipeline/lib/attribute_definitions.py`. +Possible attribute definitions can be found in `ayon_core/lib/attribute_definitions.py`. Except creating and removing instances are all changes not automatically propagated to host context (scene/workfile/...) to propagate changes call `save_changes` which trigger update of all instances in context using Creators implementation. diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 8c6a7f1bb6..ca9896fb3f 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -529,7 +529,7 @@ class AttributeValues(object): Has dictionary like methods. Not all of them are allowed all the time. Args: - attr_defs(AbstractAttrDef): Defintions of value type and properties. + attr_defs(AbstractAttrDef): Definitions of value type and properties. values(dict): Values after possible conversion. origin_data(dict): Values loaded from host before conversion. """ diff --git a/client/ayon_core/pipeline/create/creator_plugins.py b/client/ayon_core/pipeline/create/creator_plugins.py index 5505427d7e..e0b30763d0 100644 --- a/client/ayon_core/pipeline/create/creator_plugins.py +++ b/client/ayon_core/pipeline/create/creator_plugins.py @@ -347,7 +347,7 @@ class BaseCreator: Returns: str: Group label that can be used for grouping of instances in UI. - Group label can be overriden by instance itself. + Group label can be overridden by instance itself. 
""" if self._cached_group_label is None: @@ -607,18 +607,19 @@ class Creator(BaseCreator): """ # GUI Purposes - # - default_variants may not be used if `get_default_variants` is overriden + # - default_variants may not be used if `get_default_variants` + # is overridden default_variants = [] # Default variant used in 'get_default_variant' _default_variant = None # Short description of product type - # - may not be used if `get_description` is overriden + # - may not be used if `get_description` is overridden description = None # Detailed description of product type for artists - # - may not be used if `get_detail_description` is overriden + # - may not be used if `get_detail_description` is overridden detailed_description = None # It does make sense to change context on creation diff --git a/client/ayon_core/pipeline/create/product_name.py b/client/ayon_core/pipeline/create/product_name.py index 74e268fbb3..fecda867e5 100644 --- a/client/ayon_core/pipeline/create/product_name.py +++ b/client/ayon_core/pipeline/create/product_name.py @@ -1,5 +1,3 @@ -import ayon_api - from ayon_core.settings import get_project_settings from ayon_core.lib import filter_profiles, prepare_template_data diff --git a/client/ayon_core/pipeline/editorial.py b/client/ayon_core/pipeline/editorial.py index 564d78ea6f..84bffbe1ec 100644 --- a/client/ayon_core/pipeline/editorial.py +++ b/client/ayon_core/pipeline/editorial.py @@ -64,7 +64,7 @@ def convert_to_padded_path(path, padding): padding (int): number of padding Returns: - type: string with reformated path + type: string with reformatted path Example: convert_to_padded_path("plate.%d.exr") > plate.%04d.exr diff --git a/client/ayon_core/pipeline/farm/pyblish_functions.pyi b/client/ayon_core/pipeline/farm/pyblish_functions.pyi index 16c11aa480..fe0ae57da0 100644 --- a/client/ayon_core/pipeline/farm/pyblish_functions.pyi +++ b/client/ayon_core/pipeline/farm/pyblish_functions.pyi @@ -1,6 +1,6 @@ import pyblish.api from ayon_core.pipeline import Anatomy -from typing import Tuple, Union, List +from typing import Tuple, List class TimeData: diff --git a/client/ayon_core/pipeline/legacy_io.py b/client/ayon_core/pipeline/legacy_io.py deleted file mode 100644 index d5b555845b..0000000000 --- a/client/ayon_core/pipeline/legacy_io.py +++ /dev/null @@ -1,36 +0,0 @@ -import logging -from ayon_core.pipeline import get_current_project_name - -Session = {} - -log = logging.getLogger(__name__) -log.warning( - "DEPRECATION WARNING: 'legacy_io' is deprecated and will be removed in" - " future versions of ayon-core addon." - "\nReading from Session won't give you updated information and changing" - " values won't affect global state of a process." 
-) - - -def session_data_from_environment(context_keys=False): - return {} - - -def is_installed(): - return False - - -def install(): - pass - - -def uninstall(): - pass - - -def active_project(*args, **kwargs): - return get_current_project_name() - - -def current_project(*args, **kwargs): - return get_current_project_name() diff --git a/client/ayon_core/pipeline/load/plugins.py b/client/ayon_core/pipeline/load/plugins.py index 91f839ebf3..064af4ddc1 100644 --- a/client/ayon_core/pipeline/load/plugins.py +++ b/client/ayon_core/pipeline/load/plugins.py @@ -2,7 +2,6 @@ import os import logging from ayon_core.settings import get_project_settings -from ayon_core.pipeline import schema from ayon_core.pipeline.plugin_discover import ( discover, register_plugin, @@ -116,7 +115,7 @@ class LoaderPlugin(list): def is_compatible_loader(cls, context): """Return whether a loader is compatible with a context. - On override make sure it is overriden as class or static method. + On override make sure it is overridden as class or static method. This checks the product type and the representation for the given loader plugin. diff --git a/client/ayon_core/pipeline/publish/publish_plugins.py b/client/ayon_core/pipeline/publish/publish_plugins.py index 2386558091..6b1984d92b 100644 --- a/client/ayon_core/pipeline/publish/publish_plugins.py +++ b/client/ayon_core/pipeline/publish/publish_plugins.py @@ -2,7 +2,6 @@ import inspect from abc import ABCMeta import pyblish.api from pyblish.plugin import MetaPlugin, ExplicitMetaPlugin -from ayon_core.lib.transcoding import VIDEO_EXTENSIONS, IMAGE_EXTENSIONS from ayon_core.lib import BoolDef from .lib import ( diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 1d7b5ed5a7..5e63ba444a 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -1865,7 +1865,7 @@ class PlaceholderCreateMixin(object): self.log.debug("Clean up of placeholder is not implemented.") def _before_instance_create(self, placeholder): - """Can be overriden. Is called before instance is created.""" + """Can be overridden. 
Is called before instance is created.""" pass diff --git a/client/ayon_core/plugins/actions/open_file_explorer.py b/client/ayon_core/plugins/actions/open_file_explorer.py index 69375a7859..6a456c75c1 100644 --- a/client/ayon_core/plugins/actions/open_file_explorer.py +++ b/client/ayon_core/plugins/actions/open_file_explorer.py @@ -18,18 +18,14 @@ class OpenTaskPath(LauncherAction): icon = "folder-open" order = 500 - def is_compatible(self, session): + def is_compatible(self, selection): """Return whether the action is compatible with the session""" - return bool(session.get("AYON_FOLDER_PATH")) + return selection.is_folder_selected - def process(self, session, **kwargs): + def process(self, selection, **kwargs): from qtpy import QtCore, QtWidgets - project_name = session["AYON_PROJECT_NAME"] - folder_path = session["AYON_FOLDER_PATH"] - task_name = session.get("AYON_TASK_NAME", None) - - path = self._get_workdir(project_name, folder_path, task_name) + path = self._get_workdir(selection) if not path: return @@ -60,16 +56,17 @@ class OpenTaskPath(LauncherAction): path = path.split(field, 1)[0] return path - def _get_workdir(self, project_name, folder_path, task_name): - project_entity = ayon_api.get_project(project_name) - folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) - task_entity = ayon_api.get_task_by_name( - project_name, folder_entity["id"], task_name + def _get_workdir(self, selection): + data = get_template_data( + selection.project_entity, + selection.folder_entity, + selection.task_entity ) - data = get_template_data(project_entity, folder_entity, task_entity) - - anatomy = Anatomy(project_name) + anatomy = Anatomy( + selection.project_name, + project_entity=selection.project_entity + ) workdir = anatomy.get_template_item( "work", "default", "folder" ).format(data) diff --git a/client/ayon_core/plugins/load/open_djv.py b/client/ayon_core/plugins/load/open_djv.py deleted file mode 100644 index 30023ac1f5..0000000000 --- a/client/ayon_core/plugins/load/open_djv.py +++ /dev/null @@ -1,64 +0,0 @@ -import os -from ayon_core.lib import ApplicationManager -from ayon_core.pipeline import load - - -def existing_djv_path(): - app_manager = ApplicationManager() - djv_list = [] - - for app_name, app in app_manager.applications.items(): - if 'djv' in app_name and app.find_executable(): - djv_list.append(app_name) - - return djv_list - - -class OpenInDJV(load.LoaderPlugin): - """Open Image Sequence with system default""" - - djv_list = existing_djv_path() - product_types = {"*"} if djv_list else [] - representations = ["*"] - extensions = { - "cin", "dpx", "avi", "dv", "gif", "flv", "mkv", "mov", "mpg", "mpeg", - "mp4", "m4v", "mxf", "iff", "z", "ifl", "jpeg", "jpg", "jfif", "lut", - "1dl", "exr", "pic", "png", "ppm", "pnm", "pgm", "pbm", "rla", "rpf", - "sgi", "rgba", "rgb", "bw", "tga", "tiff", "tif", "img", "h264", - } - - label = "Open in DJV" - order = -10 - icon = "play-circle" - color = "orange" - - def load(self, context, name, namespace, data): - import clique - - path = self.filepath_from_context(context) - directory = os.path.dirname(path) - - pattern = clique.PATTERNS["frames"] - files = os.listdir(directory) - collections, remainder = clique.assemble( - files, - patterns=[pattern], - minimum_items=1 - ) - - if not remainder: - sequence = collections[0] - first_image = list(sequence)[0] - else: - first_image = path - filepath = os.path.normpath(os.path.join(directory, first_image)) - - self.log.info("Opening : {}".format(filepath)) - - last_djv_version = 
sorted(self.djv_list)[-1] - - app_manager = ApplicationManager() - djv = app_manager.applications.get(last_djv_version) - djv.arguments.append(filepath) - - app_manager.launch(last_djv_version) diff --git a/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py b/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py index e3b27a0db5..f8cc81e718 100644 --- a/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py +++ b/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py @@ -465,7 +465,11 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): current_data = hierarchy_context.get(project_name, {}) for key in folder_path.split("/"): if key: - current_data = current_data.get("childs", {}).get(key, {}) + current_data = ( + current_data + .get("children", {}) + .get(key, {}) + ) tasks_info = current_data.get("tasks", {}) task_info = tasks_info.get(task_name, {}) @@ -529,5 +533,5 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): return item[folder_name].get("tasks") or {} for subitem in item.values(): - hierarchy_queue.extend(subitem.get("childs") or []) + hierarchy_queue.extend(subitem.get("children") or []) return {} diff --git a/client/ayon_core/plugins/publish/collect_hierarchy.py b/client/ayon_core/plugins/publish/collect_hierarchy.py index 7387b1865b..2ae3cc67f3 100644 --- a/client/ayon_core/plugins/publish/collect_hierarchy.py +++ b/client/ayon_core/plugins/publish/collect_hierarchy.py @@ -17,17 +17,18 @@ class CollectHierarchy(pyblish.api.ContextPlugin): hosts = ["resolve", "hiero", "flame"] def process(self, context): - temp_context = {} project_name = context.data["projectName"] - final_context = {} - final_context[project_name] = {} - final_context[project_name]["entity_type"] = "project" - + final_context = { + project_name: { + "entity_type": "project", + "children": {} + }, + } + temp_context = {} for instance in context: self.log.debug("Processing instance: `{}` ...".format(instance)) # shot data dict - shot_data = {} product_type = instance.data["productType"] families = instance.data["families"] @@ -41,34 +42,38 @@ class CollectHierarchy(pyblish.api.ContextPlugin): if not instance.data.get("heroTrack"): continue - # suppose that all instances are Shots - shot_data['entity_type'] = 'Shot' - shot_data['tasks'] = instance.data.get("tasks") or {} - shot_data["comments"] = instance.data.get("comments", []) - - shot_data['custom_attributes'] = { - "handleStart": instance.data["handleStart"], - "handleEnd": instance.data["handleEnd"], - "frameStart": instance.data["frameStart"], - "frameEnd": instance.data["frameEnd"], - "clipIn": instance.data["clipIn"], - "clipOut": instance.data["clipOut"], - "fps": instance.data["fps"], - "resolutionWidth": instance.data["resolutionWidth"], - "resolutionHeight": instance.data["resolutionHeight"], - "pixelAspect": instance.data["pixelAspect"] + shot_data = { + "entity_type": "folder", + # WARNING Default folder type is hardcoded + # suppose that all instances are Shots + "folder_type": "Shot", + "tasks": instance.data.get("tasks") or {}, + "comments": instance.data.get("comments", []), + "attributes": { + "handleStart": instance.data["handleStart"], + "handleEnd": instance.data["handleEnd"], + "frameStart": instance.data["frameStart"], + "frameEnd": instance.data["frameEnd"], + "clipIn": instance.data["clipIn"], + "clipOut": instance.data["clipOut"], + "fps": instance.data["fps"], + "resolutionWidth": instance.data["resolutionWidth"], + "resolutionHeight": 
instance.data["resolutionHeight"], + "pixelAspect": instance.data["pixelAspect"], + }, } # Split by '/' for AYON where asset is a path name = instance.data["folderPath"].split("/")[-1] actual = {name: shot_data} for parent in reversed(instance.data["parents"]): - next_dict = {} - parent_name = parent["entity_name"] - next_dict[parent_name] = {} - next_dict[parent_name]["entity_type"] = parent[ - "entity_type"].capitalize() - next_dict[parent_name]["childs"] = actual + next_dict = { + parent["entity_name"]: { + "entity_type": "folder", + "folder_type": parent["folder_type"], + "children": actual, + } + } actual = next_dict temp_context = self._update_dict(temp_context, actual) @@ -77,7 +82,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): if not temp_context: return - final_context[project_name]['childs'] = temp_context + final_context[project_name]["children"] = temp_context # adding hierarchy context to context context.data["hierarchyContext"] = final_context @@ -85,8 +90,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): context.data["hierarchyContext"])) def _update_dict(self, parent_dict, child_dict): - """ - Nesting each children into its parent. + """Nesting each child into its parent. Args: parent_dict (dict): parent dict wich should be nested with children diff --git a/client/ayon_core/plugins/publish/collect_host_name.py b/client/ayon_core/plugins/publish/collect_host_name.py index e76579bbd2..ea4ec7ad41 100644 --- a/client/ayon_core/plugins/publish/collect_host_name.py +++ b/client/ayon_core/plugins/publish/collect_host_name.py @@ -1,14 +1,13 @@ """ Requires: None + Provides: - context -> host (str) + context -> hostName (str) """ import os import pyblish.api -from ayon_core.lib import ApplicationManager - class CollectHostName(pyblish.api.ContextPlugin): """Collect avalon host name to context.""" @@ -18,30 +17,8 @@ class CollectHostName(pyblish.api.ContextPlugin): def process(self, context): host_name = context.data.get("hostName") - app_name = context.data.get("appName") - app_label = context.data.get("appLabel") - # Don't override value if is already set - if host_name and app_name and app_label: + if host_name: return # Use AYON_HOST_NAME to get host name if available - if not host_name: - host_name = os.environ.get("AYON_HOST_NAME") - - # Use AYON_APP_NAME to get full app name - if not app_name: - app_name = os.environ.get("AYON_APP_NAME") - - # Fill missing values based on app full name - if (not host_name or not app_label) and app_name: - app_manager = ApplicationManager() - app = app_manager.applications.get(app_name) - if app: - if not host_name: - host_name = app.host_name - if not app_label: - app_label = app.full_label - - context.data["hostName"] = host_name - context.data["appName"] = app_name - context.data["appLabel"] = app_label + context.data["hostName"] = os.environ.get("AYON_HOST_NAME") diff --git a/client/ayon_core/plugins/publish/collect_settings.py b/client/ayon_core/plugins/publish/collect_settings.py index 66b89a114c..db58e7eaa9 100644 --- a/client/ayon_core/plugins/publish/collect_settings.py +++ b/client/ayon_core/plugins/publish/collect_settings.py @@ -1,5 +1,5 @@ from pyblish import api -from ayon_core.settings import get_current_project_settings +from ayon_core.settings import get_project_settings class CollectSettings(api.ContextPlugin): @@ -9,4 +9,9 @@ class CollectSettings(api.ContextPlugin): label = "Collect Settings" def process(self, context): - context.data["project_settings"] = get_current_project_settings() + project_name = 
context.data["projectName"] + self.log.debug( + "Collecting settings for project: {}".format(project_name) + ) + project_settings = get_project_settings(project_name) + context.data["project_settings"] = project_settings diff --git a/client/ayon_core/plugins/publish/extract_burnin.py b/client/ayon_core/plugins/publish/extract_burnin.py index ab6353a29f..93774842ca 100644 --- a/client/ayon_core/plugins/publish/extract_burnin.py +++ b/client/ayon_core/plugins/publish/extract_burnin.py @@ -27,7 +27,7 @@ class ExtractBurnin(publish.Extractor): Extractor to create video with pre-defined burnins from existing extracted video representation. - It will work only on represenations having `burnin = True` or + It will work only on representations having `burnin = True` or `tags` including `burnin` """ @@ -125,7 +125,7 @@ class ExtractBurnin(publish.Extractor): burnin_defs = copy.deepcopy(src_burnin_defs) - # Filter output definition by `burnin` represetation key + # Filter output definition by `burnin` representation key repre_linked_burnins = [ burnin_def for burnin_def in burnin_defs @@ -194,6 +194,16 @@ class ExtractBurnin(publish.Extractor): ).format(host_name, product_type, task_name, profile)) return + burnins_per_repres = self._get_burnins_per_representations( + instance, burnin_defs + ) + if not burnins_per_repres: + self.log.debug( + "Skipped instance. No representations found matching a burnin" + "definition in: %s", burnin_defs + ) + return + burnin_options = self._get_burnin_options() # Prepare basic data for processing @@ -204,9 +214,6 @@ class ExtractBurnin(publish.Extractor): # Args that will execute the script executable_args = ["run", scriptpath] - burnins_per_repres = self._get_burnins_per_representations( - instance, burnin_defs - ) for repre, repre_burnin_defs in burnins_per_repres: # Create copy of `_burnin_data` and `_temp_data` for repre. burnin_data = copy.deepcopy(_burnin_data) @@ -371,6 +378,7 @@ class ExtractBurnin(publish.Extractor): # Prepare subprocess arguments args = list(executable_args) args.append(temporary_json_filepath) + args.append("--headless") self.log.debug("Executing: {}".format(" ".join(args))) # Run burnin script @@ -540,7 +548,7 @@ class ExtractBurnin(publish.Extractor): return burnin_data, temp_data def repres_is_valid(self, repre): - """Validation if representaion should be processed. + """Validation if representation should be processed. Args: repre (dict): Representation which should be checked. @@ -572,7 +580,7 @@ class ExtractBurnin(publish.Extractor): tags (list): Tags of processed representation. Returns: - list: Containg all burnin definitions matching entered tags. + list: Contain all burnin definitions matching entered tags. """ filtered_burnins = [] @@ -597,7 +605,7 @@ class ExtractBurnin(publish.Extractor): Store data to `temp_data` for keys "full_input_path" which is full path to source files optionally with sequence formatting, - "full_output_path" full path to otput with optionally with sequence + "full_output_path" full path to output with optionally with sequence formatting, "full_input_paths" list of all source files which will be deleted when burnin script ends, "repre_files" list of output filenames. @@ -747,7 +755,7 @@ class ExtractBurnin(publish.Extractor): profile (dict): Profile from presets matching current context. Returns: - list: Containg all valid output definitions. + list: Contain all valid output definitions. 
""" filtered_burnin_defs = [] @@ -768,7 +776,7 @@ class ExtractBurnin(publish.Extractor): ): self.log.debug(( "Skipped burnin definition \"{}\". Family" - " fiters ({}) does not match current instance families: {}" + " filters ({}) does not match current instance families: {}" ).format( filename_suffix, str(families_filters), str(families) )) diff --git a/client/ayon_core/plugins/publish/extract_hierarchy_to_ayon.py b/client/ayon_core/plugins/publish/extract_hierarchy_to_ayon.py index 59a15af299..60c92aa8b1 100644 --- a/client/ayon_core/plugins/publish/extract_hierarchy_to_ayon.py +++ b/client/ayon_core/plugins/publish/extract_hierarchy_to_ayon.py @@ -115,6 +115,10 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): entity_hub = EntityHub(project_name) project = entity_hub.project_entity + folder_type_name_by_low_name = { + folder_type_item["name"].lower(): folder_type_item["name"] + for folder_type_item in project.get_folder_types() + } hierarchy_match_queue = collections.deque() hierarchy_match_queue.append((project, hierarchy_context)) @@ -167,8 +171,18 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): # TODO check if existing entity have 'folder' type child_entity = children_by_low_name.get(child_name.lower()) if child_entity is None: + folder_type = folder_type_name_by_low_name.get( + child_info["folder_type"].lower() + ) + if folder_type is None: + # TODO add validator for folder type validations + self.log.warning(( + "Couldn't find folder type '{}'" + ).format(child_info["folder_type"])) + folder_type = "Folder" + child_entity = entity_hub.add_new_folder( - child_info["entity_type"], + folder_type, parent_id=entity.id, name=child_name ) @@ -223,12 +237,11 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): # filter only the active publishing instances active_folder_paths = set() for instance in context: - if instance.data.get("publish") is not False: + if instance.data.get("publish", True) is not False: active_folder_paths.add(instance.data.get("folderPath")) active_folder_paths.discard(None) - self.log.debug("Active folder paths: {}".format(active_folder_paths)) if not active_folder_paths: return None @@ -237,11 +250,11 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): hierarchy_context = copy.deepcopy(context.data["hierarchyContext"]) for key, value in hierarchy_context.items(): project_item = copy.deepcopy(value) - project_children_context = project_item.pop("childs", None) + project_children_context = project_item.pop("children", None) project_item["name"] = key project_item["tasks"] = [] project_item["attributes"] = project_item.pop( - "custom_attributes", {} + "attributes", {} ) project_item["children"] = [] @@ -265,22 +278,23 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): folder_path = "{}/{}".format(parent_path, folder_name) if ( folder_path not in active_folder_paths - and not folder_info.get("childs") + and not folder_info.get("children") ): continue item_id = uuid.uuid4().hex new_item = copy.deepcopy(folder_info) + new_children_context = new_item.pop("children", None) + tasks = new_item.pop("tasks", {}) + new_item["name"] = folder_name new_item["children"] = [] - new_children_context = new_item.pop("childs", None) - tasks = new_item.pop("tasks", {}) task_items = [] for task_name, task_info in tasks.items(): task_info["name"] = task_name task_items.append(task_info) new_item["tasks"] = task_items - new_item["attributes"] = new_item.pop("custom_attributes", {}) + new_item["attributes"] = new_item.pop("attributes", {}) 
items_by_id[item_id] = new_item parent_id_by_item_id[item_id] = parent_id diff --git a/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py b/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py index a19b5b9090..98723beffa 100644 --- a/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py +++ b/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py @@ -80,7 +80,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): # create duration duration = (timeline_out_h - timeline_in_h) + 1 - # ffmpeg generate new file only if doesnt exists already + # ffmpeg generate new file only if doesn't exists already if not recycling_file: # convert to seconds start_sec = float(timeline_in_h / fps) diff --git a/client/ayon_core/plugins/publish/extract_review.py b/client/ayon_core/plugins/publish/extract_review.py index 905158c851..d51a7374d3 100644 --- a/client/ayon_core/plugins/publish/extract_review.py +++ b/client/ayon_core/plugins/publish/extract_review.py @@ -619,7 +619,7 @@ class ExtractReview(pyblish.api.InstancePlugin): # Prepare input and output filepaths self.input_output_paths(new_repre, output_def, temp_data) - # Set output frames len to 1 when ouput is single image + # Set output frames len to 1 when output is single image if ( temp_data["output_ext_is_image"] and not temp_data["output_is_sequence"] @@ -955,7 +955,7 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("New representation ext: `{}`".format(output_ext)) - # Output is image file sequence witht frames + # Output is image file sequence with frames output_ext_is_image = bool(output_ext in self.image_exts) output_is_sequence = bool( output_ext_is_image @@ -967,7 +967,7 @@ class ExtractReview(pyblish.api.InstancePlugin): frame_end = temp_data["output_frame_end"] filename_base = "{}_{}".format(filename, filename_suffix) - # Temporary tempalte for frame filling. Example output: + # Temporary template for frame filling. Example output: # "basename.%04d.exr" when `frame_end` == 1001 repr_file = "{}.%{:0>2}d.{}".format( filename_base, len(str(frame_end)), output_ext @@ -997,7 +997,7 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("Creating dir: {}".format(dst_staging_dir)) os.makedirs(dst_staging_dir) - # Store stagingDir to representaion + # Store stagingDir to representation new_repre["stagingDir"] = dst_staging_dir # Store paths to temp data @@ -1225,19 +1225,13 @@ class ExtractReview(pyblish.api.InstancePlugin): filters = [] # if reformat input video file is already reforamted from upstream - reformat_in_baking = bool("reformated" in new_repre["tags"]) + reformat_in_baking = ( + "reformatted" in new_repre["tags"] + # Backwards compatibility + or "reformated" in new_repre["tags"] + ) self.log.debug("reformat_in_baking: `{}`".format(reformat_in_baking)) - # Get instance data - pixel_aspect = temp_data["pixel_aspect"] - - if reformat_in_baking: - self.log.debug(( - "Using resolution from input. It is already " - "reformated from upstream process" - )) - pixel_aspect = 1 - # NOTE Skipped using instance's resolution full_input_path_single_file = temp_data["full_input_path_single_file"] try: @@ -1268,7 +1262,7 @@ class ExtractReview(pyblish.api.InstancePlugin): if reformat_in_baking: self.log.debug(( "Using resolution from input. 
It is already " - "reformated from upstream process" + "reformatted from upstream process" )) pixel_aspect = 1 output_width = input_width @@ -1374,7 +1368,7 @@ class ExtractReview(pyblish.api.InstancePlugin): # Make sure output width and height is not an odd number # When this can happen: # - if output definition has set width and height with odd number - # - `instance.data` contain width and height with odd numbeer + # - `instance.data` contain width and height with odd number if output_width % 2 != 0: self.log.warning(( "Converting output width from odd to even number. {} -> {}" @@ -1555,7 +1549,7 @@ class ExtractReview(pyblish.api.InstancePlugin): custom_tags (list): Custom Tags of processed representation. Returns: - list: Containg all output definitions matching entered tags. + list: Containing all output definitions matching entered tags. """ filtered_outputs = [] @@ -1820,8 +1814,8 @@ class OverscanCrop: """ # crop=width:height:x:y - explicit start x, y position # crop=width:height - x, y are related to center by width/height - # pad=width:heigth:x:y - explicit start x, y position - # pad=width:heigth - x, y are set to 0 by default + # pad=width:height:x:y - explicit start x, y position + # pad=width:height - x, y are set to 0 by default width = self.width() height = self.height() @@ -1869,7 +1863,7 @@ class OverscanCrop: # Replace "px" (and spaces before) with single space string_value = re.sub(r"([ ]+)?px", " ", string_value) string_value = re.sub(r"([ ]+)%", "%", string_value) - # Make sure +/- sign at the beggining of string is next to number + # Make sure +/- sign at the beginning of string is next to number string_value = re.sub(r"^([\+\-])[ ]+", "\g<1>", string_value) # Make sure +/- sign in the middle has zero spaces before number under # which belongs diff --git a/client/ayon_core/plugins/publish/help/validate_containers.xml b/client/ayon_core/plugins/publish/help/validate_containers.xml index 5d18bb4c19..321e73a303 100644 --- a/client/ayon_core/plugins/publish/help/validate_containers.xml +++ b/client/ayon_core/plugins/publish/help/validate_containers.xml @@ -10,7 +10,7 @@ Scene contains one or more outdated loaded containers, eg. versions loaded into ### How to repair? Use 'Scene Inventory' and update all highlighted old container to latest OR - refresh Publish and switch 'Validate Containers' toggle on 'Options' tab. +refresh Publish and switch 'Validate Containers' toggle on 'Context' tab. WARNING: Skipping this validator will result in publishing (and probably rendering) old version of loaded assets. 
diff --git a/client/ayon_core/plugins/publish/integrate_hero_version.py b/client/ayon_core/plugins/publish/integrate_hero_version.py index 7969457697..8c36719b77 100644 --- a/client/ayon_core/plugins/publish/integrate_hero_version.py +++ b/client/ayon_core/plugins/publish/integrate_hero_version.py @@ -90,6 +90,9 @@ class IntegrateHeroVersion( # *but all other plugins must be sucessfully completed def process(self, instance): + if not self.is_active(instance.data): + return + self.log.debug( "--- Integration of Hero version for product `{}` begins.".format( instance.data["productName"] diff --git a/client/ayon_core/plugins/publish/validate_editorial_asset_name.py b/client/ayon_core/plugins/publish/validate_editorial_asset_name.py deleted file mode 100644 index 9b6794a0c4..0000000000 --- a/client/ayon_core/plugins/publish/validate_editorial_asset_name.py +++ /dev/null @@ -1,122 +0,0 @@ -from pprint import pformat - -import ayon_api -import pyblish.api - -from ayon_core.pipeline import KnownPublishError - - -class ValidateEditorialAssetName(pyblish.api.ContextPlugin): - """ Validating if editorial's folder names are not already created in db. - - Checking variations of names with different size of caps or with - or without underscores. - """ - - order = pyblish.api.ValidatorOrder - label = "Validate Editorial Folder Name" - hosts = [ - "hiero", - "resolve", - "flame", - "traypublisher" - ] - - def process(self, context): - - folder_and_parents = self.get_parents(context) - self.log.debug("__ folder_and_parents: {}".format(folder_and_parents)) - - project_name = context.data["projectName"] - folder_entities = list(ayon_api.get_folders( - project_name, fields={"path"} - )) - self.log.debug("__ folder_entities: {}".format(folder_entities)) - - existing_folder_paths = { - folder_entity["path"]: ( - folder_entity["path"].lstrip("/").rsplit("/")[0] - ) - for folder_entity in folder_entities - } - - self.log.debug("__ project_entities: {}".format( - pformat(existing_folder_paths))) - - folders_missing_name = {} - folders_wrong_parent = {} - for folder_path in folder_and_parents.keys(): - if folder_path not in existing_folder_paths.keys(): - # add to some nonexistent list for next layer of check - folders_missing_name[folder_path] = ( - folder_and_parents[folder_path] - ) - continue - - existing_parents = existing_folder_paths[folder_path] - if folder_and_parents[folder_path] != existing_parents: - # add to some nonexistent list for next layer of check - folders_wrong_parent[folder_path] = { - "required": folder_and_parents[folder_path], - "already_in_db": existing_folder_paths[folder_path] - } - continue - - self.log.debug("correct folder: {}".format(folder_path)) - - if folders_missing_name: - wrong_names = {} - self.log.debug( - ">> folders_missing_name: {}".format(folders_missing_name)) - - # This will create set of folder paths - folder_paths = { - folder_path.lower().replace("_", "") - for folder_path in existing_folder_paths - } - - for folder_path in folders_missing_name: - _folder_path = folder_path.lower().replace("_", "") - if _folder_path in folder_paths: - wrong_names[folder_path].update( - { - "required_name": folder_path, - "used_variants_in_db": [ - p - for p in existing_folder_paths - if p.lower().replace("_", "") == _folder_path - ] - } - ) - - if wrong_names: - self.log.debug( - ">> wrong_names: {}".format(wrong_names)) - raise Exception( - "Some already existing folder name variants `{}`".format( - wrong_names)) - - if folders_wrong_parent: - self.log.debug( - ">> 
folders_wrong_parent: {}".format(folders_wrong_parent)) - raise KnownPublishError( - "Wrong parents on folders `{}`".format(folders_wrong_parent)) - - def get_parents(self, context): - output = {} - for instance in context: - folder_path = instance.data["folderPath"] - families = instance.data.get("families", []) + [ - instance.data["family"] - ] - # filter out non-shot families - if "shot" not in families: - continue - - parents = instance.data["parents"] - - output[folder_path] = [ - str(p["entity_name"]) for p in parents - if p["entity_type"].lower() != "project" - ] - return output diff --git a/client/ayon_core/scripts/slates/slate_base/api.py b/client/ayon_core/scripts/slates/slate_base/api.py index cd64c68134..d1b4b22979 100644 --- a/client/ayon_core/scripts/slates/slate_base/api.py +++ b/client/ayon_core/scripts/slates/slate_base/api.py @@ -13,3 +13,21 @@ from .items import ( ) from .lib import create_slates from .example import example + + +__all__ = ( + "FontFactory", + "BaseObj", + "load_default_style", + "MainFrame", + "Layer", + "BaseItem", + "ItemImage", + "ItemRectangle", + "ItemPlaceHolder", + "ItemText", + "ItemTable", + "TableField", + "create_slates", + "example", +) diff --git a/client/ayon_core/settings/lib.py b/client/ayon_core/settings/lib.py index d72e4f357a..3929818d31 100644 --- a/client/ayon_core/settings/lib.py +++ b/client/ayon_core/settings/lib.py @@ -201,7 +201,7 @@ def get_current_project_settings(): Project name should be stored in environment variable `AYON_PROJECT_NAME`. This function should be used only in host context where environment variable must be set and should not happen that any part of process will - change the value of the enviornment variable. + change the value of the environment variable. """ project_name = os.environ.get("AYON_PROJECT_NAME") if not project_name: @@ -209,6 +209,3 @@ def get_current_project_settings(): "Missing context project in environemt variable `AYON_PROJECT_NAME`." 
) return get_project_settings(project_name) - - - diff --git a/client/ayon_core/tools/common_models/thumbnails.py b/client/ayon_core/tools/common_models/thumbnails.py index 138cee4ea2..1c3aadc49f 100644 --- a/client/ayon_core/tools/common_models/thumbnails.py +++ b/client/ayon_core/tools/common_models/thumbnails.py @@ -112,7 +112,7 @@ class ThumbnailsCache: """ thumbnails_dir = self.get_thumbnails_dir() - # Skip if thumbnails dir does not exists yet + # Skip if thumbnails dir does not exist yet if not os.path.exists(thumbnails_dir): return diff --git a/client/ayon_core/tools/launcher/models/actions.py b/client/ayon_core/tools/launcher/models/actions.py index 97943e6ad7..32df600c87 100644 --- a/client/ayon_core/tools/launcher/models/actions.py +++ b/client/ayon_core/tools/launcher/models/actions.py @@ -2,9 +2,11 @@ import os from ayon_core import resources from ayon_core.lib import Logger, AYONSettingsRegistry +from ayon_core.addon import AddonsManager from ayon_core.pipeline.actions import ( discover_launcher_actions, LauncherAction, + LauncherActionSelection, ) from ayon_core.pipeline.workfile import should_use_last_workfile_on_launch @@ -69,11 +71,6 @@ class ApplicationAction(LauncherAction): project_entities = {} _log = None - required_session_keys = ( - "AYON_PROJECT_NAME", - "AYON_FOLDER_PATH", - "AYON_TASK_NAME" - ) @property def log(self): @@ -81,18 +78,16 @@ class ApplicationAction(LauncherAction): self._log = Logger.get_logger(self.__class__.__name__) return self._log - def is_compatible(self, session): - for key in self.required_session_keys: - if not session.get(key): - return False + def is_compatible(self, selection): + if not selection.is_task_selected: + return False - project_name = session["AYON_PROJECT_NAME"] - project_entity = self.project_entities[project_name] + project_entity = self.project_entities[selection.project_name] apps = project_entity["attrib"].get("applications") if not apps or self.application.full_name not in apps: return False - project_settings = self.project_settings[project_name] + project_settings = self.project_settings[selection.project_name] only_available = project_settings["applications"]["only_available"] if only_available and not self.application.find_executable(): return False @@ -112,26 +107,23 @@ class ApplicationAction(LauncherAction): dialog.setDetailedText(details) dialog.exec_() - def process(self, session, **kwargs): + def process(self, selection, **kwargs): """Process the full Application action""" - from ayon_core.lib import ( - ApplictionExecutableNotFound, + from ayon_applications import ( + ApplicationExecutableNotFound, ApplicationLaunchFailed, ) - project_name = session["AYON_PROJECT_NAME"] - folder_path = session["AYON_FOLDER_PATH"] - task_name = session["AYON_TASK_NAME"] try: self.application.launch( - project_name=project_name, - folder_path=folder_path, - task_name=task_name, + project_name=selection.project_name, + folder_path=selection.folder_path, + task_name=selection.task_name, **self.data ) - except ApplictionExecutableNotFound as exc: + except ApplicationExecutableNotFound as exc: details = exc.details msg = exc.msg log_msg = str(msg) @@ -279,6 +271,8 @@ class ActionsModel: self._launcher_tool_reg = AYONSettingsRegistry("launcher_tool") + self._addons_manager = None + @property def log(self): if self._log is None: @@ -335,11 +329,11 @@ class ActionsModel: """ not_open_workfile_actions = self._get_no_last_workfile_for_context( project_name, folder_id, task_id) - session = self._prepare_session(project_name, folder_id, 
task_id) + selection = self._prepare_selection(project_name, folder_id, task_id) output = [] action_items = self._get_action_items(project_name) for identifier, action in self._get_action_objects().items(): - if not action.is_compatible(session): + if not action.is_compatible(selection): continue action_item = action_items[identifier] @@ -374,7 +368,7 @@ class ActionsModel: ) def trigger_action(self, project_name, folder_id, task_id, identifier): - session = self._prepare_session(project_name, folder_id, task_id) + selection = self._prepare_selection(project_name, folder_id, task_id) failed = False error_message = None action_label = identifier @@ -403,7 +397,7 @@ class ActionsModel: ) action.data["start_last_workfile"] = start_last_workfile - action.process(session) + action.process(selection) except Exception as exc: self.log.warning("Action trigger failed.", exc_info=True) failed = True @@ -419,6 +413,11 @@ class ActionsModel: } ) + def _get_addons_manager(self): + if self._addons_manager is None: + self._addons_manager = AddonsManager() + return self._addons_manager + def _get_no_last_workfile_reg_data(self): try: no_workfile_reg_data = self._launcher_tool_reg.get_item( @@ -440,29 +439,8 @@ class ActionsModel: .get(task_id, {}) ) - def _prepare_session(self, project_name, folder_id, task_id): - folder_path = None - if folder_id: - folder = self._controller.get_folder_entity( - project_name, folder_id) - if folder: - folder_path = folder["path"] - - task_name = None - if task_id: - task = self._controller.get_task_entity(project_name, task_id) - if task: - task_name = task["name"] - - return { - "AYON_PROJECT_NAME": project_name, - "AYON_FOLDER_PATH": folder_path, - "AYON_TASK_NAME": task_name, - # Deprecated - kept for backwards compatibility - "AVALON_PROJECT": project_name, - "AVALON_ASSET": folder_path, - "AVALON_TASK": task_name, - } + def _prepare_selection(self, project_name, folder_id, task_id): + return LauncherActionSelection(project_name, folder_id, task_id) def _get_discovered_action_classes(self): if self._discovered_actions is None: @@ -519,19 +497,16 @@ class ActionsModel: return action_items def _get_applications_action_classes(self): - from ayon_core.lib.applications import ( - CUSTOM_LAUNCH_APP_GROUPS, - ApplicationManager, - ) - actions = [] - manager = ApplicationManager() + addons_manager = self._get_addons_manager() + applications_addon = addons_manager.get_enabled_addon("applications") + if applications_addon is None: + return actions + + manager = applications_addon.get_applications_manager() for full_name, application in manager.applications.items(): - if ( - application.group.name in CUSTOM_LAUNCH_APP_GROUPS - or not application.enabled - ): + if not application.enabled: continue action = type( diff --git a/client/ayon_core/tools/loader/abstract.py b/client/ayon_core/tools/loader/abstract.py index 33add0213b..7a7d335092 100644 --- a/client/ayon_core/tools/loader/abstract.py +++ b/client/ayon_core/tools/loader/abstract.py @@ -871,7 +871,7 @@ class FrontendLoaderController(_BaseLoaderController): # Site sync functions @abstractmethod - def is_site_sync_enabled(self, project_name=None): + def is_sitesync_enabled(self, project_name=None): """Is site sync enabled. Site sync addon can be enabled but can be disabled per project. 
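
For context on the launcher changes above: ApplicationAction.is_compatible and process now receive a LauncherActionSelection object (built in _prepare_selection from the project name, folder id and task id) instead of the old session dict keyed by AYON_* variables. A minimal sketch of a custom action written against the new signature follows; only the attributes used in this diff (is_task_selected, project_name, folder_path, task_name) are assumed to exist on the selection object, and the action name, label and body are purely illustrative.

from ayon_core.pipeline.actions import LauncherAction


class ExampleTaskAction(LauncherAction):
    name = "example_task_action"
    label = "Example Task Action"

    def is_compatible(self, selection):
        # Previously: check session.get("AYON_TASK_NAME") and related keys
        return selection.is_task_selected

    def process(self, selection, **kwargs):
        # Previously: read AYON_PROJECT_NAME / AYON_FOLDER_PATH /
        # AYON_TASK_NAME from the session dict
        print(
            "Triggered for {} / {} / {}".format(
                selection.project_name,
                selection.folder_path,
                selection.task_name,
            )
        )
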
diff --git a/client/ayon_core/tools/loader/control.py b/client/ayon_core/tools/loader/control.py index d8562f50ca..0c9bb369c7 100644 --- a/client/ayon_core/tools/loader/control.py +++ b/client/ayon_core/tools/loader/control.py @@ -113,7 +113,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): self._products_model = ProductsModel(self) self._loader_actions_model = LoaderActionsModel(self) self._thumbnails_model = ThumbnailsModel() - self._site_sync_model = SiteSyncModel(self) + self._sitesync_model = SiteSyncModel(self) @property def log(self): @@ -149,7 +149,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): self._loader_actions_model.reset() self._projects_model.reset() self._thumbnails_model.reset() - self._site_sync_model.reset() + self._sitesync_model.reset() self._projects_model.refresh() @@ -240,7 +240,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): project_name, representation_ids) ) - action_items.extend(self._site_sync_model.get_site_sync_action_items( + action_items.extend(self._sitesync_model.get_sitesync_action_items( project_name, representation_ids) ) @@ -254,8 +254,8 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): version_ids, representation_ids ): - if self._site_sync_model.is_site_sync_action(identifier): - self._site_sync_model.trigger_action_item( + if self._sitesync_model.is_sitesync_action(identifier): + self._sitesync_model.trigger_action_item( identifier, project_name, representation_ids @@ -368,24 +368,24 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): self._loaded_products_cache.update_data(product_ids) return self._loaded_products_cache.get_data() - def is_site_sync_enabled(self, project_name=None): - return self._site_sync_model.is_site_sync_enabled(project_name) + def is_sitesync_enabled(self, project_name=None): + return self._sitesync_model.is_sitesync_enabled(project_name) def get_active_site_icon_def(self, project_name): - return self._site_sync_model.get_active_site_icon_def(project_name) + return self._sitesync_model.get_active_site_icon_def(project_name) def get_remote_site_icon_def(self, project_name): - return self._site_sync_model.get_remote_site_icon_def(project_name) + return self._sitesync_model.get_remote_site_icon_def(project_name) def get_version_sync_availability(self, project_name, version_ids): - return self._site_sync_model.get_version_sync_availability( + return self._sitesync_model.get_version_sync_availability( project_name, version_ids ) def get_representations_sync_status( self, project_name, representation_ids ): - return self._site_sync_model.get_representations_sync_status( + return self._sitesync_model.get_representations_sync_status( project_name, representation_ids ) diff --git a/client/ayon_core/tools/loader/models/__init__.py b/client/ayon_core/tools/loader/models/__init__.py index 8e640659a0..10fd3da4d3 100644 --- a/client/ayon_core/tools/loader/models/__init__.py +++ b/client/ayon_core/tools/loader/models/__init__.py @@ -1,7 +1,7 @@ from .selection import SelectionModel from .products import ProductsModel from .actions import LoaderActionsModel -from .site_sync import SiteSyncModel +from .sitesync import SiteSyncModel __all__ = ( diff --git a/client/ayon_core/tools/loader/models/site_sync.py b/client/ayon_core/tools/loader/models/sitesync.py similarity index 90% rename from client/ayon_core/tools/loader/models/site_sync.py rename to client/ayon_core/tools/loader/models/sitesync.py 
index a589cf7fbe..987510905b 100644 --- a/client/ayon_core/tools/loader/models/site_sync.py +++ b/client/ayon_core/tools/loader/models/sitesync.py @@ -36,7 +36,7 @@ class SiteSyncModel: self._controller = controller self._site_icons = None - self._site_sync_enabled_cache = NestedCacheItem( + self._sitesync_enabled_cache = NestedCacheItem( levels=1, lifetime=self.lifetime ) self._active_site_cache = NestedCacheItem( @@ -57,17 +57,17 @@ class SiteSyncModel: ) manager = AddonsManager() - self._site_sync_addon = manager.get("sync_server") + self._sitesync_addon = manager.get("sitesync") def reset(self): self._site_icons = None - self._site_sync_enabled_cache.reset() + self._sitesync_enabled_cache.reset() self._active_site_cache.reset() self._remote_site_cache.reset() self._version_availability_cache.reset() self._repre_status_cache.reset() - def is_site_sync_enabled(self, project_name=None): + def is_sitesync_enabled(self, project_name=None): """Site sync is enabled for a project. Returns false if site sync addon is not available or enabled @@ -82,13 +82,13 @@ class SiteSyncModel: bool: Site sync is enabled. """ - if not self._is_site_sync_addon_enabled(): + if not self._is_sitesync_addon_enabled(): return False - cache = self._site_sync_enabled_cache[project_name] + cache = self._sitesync_enabled_cache[project_name] if not cache.is_valid: enabled = True if project_name: - enabled = self._site_sync_addon.is_project_enabled( + enabled = self._sitesync_addon.is_project_enabled( project_name, single=True ) cache.update_data(enabled) @@ -107,8 +107,8 @@ class SiteSyncModel: cache = self._active_site_cache[project_name] if not cache.is_valid: site_name = None - if project_name and self._is_site_sync_addon_enabled(): - site_name = self._site_sync_addon.get_active_site(project_name) + if project_name and self._is_sitesync_addon_enabled(): + site_name = self._sitesync_addon.get_active_site(project_name) cache.update_data(site_name) return cache.get_data() @@ -125,8 +125,8 @@ class SiteSyncModel: cache = self._remote_site_cache[project_name] if not cache.is_valid: site_name = None - if project_name and self._is_site_sync_addon_enabled(): - site_name = self._site_sync_addon.get_remote_site(project_name) + if project_name and self._is_sitesync_addon_enabled(): + site_name = self._sitesync_addon.get_remote_site(project_name) cache.update_data(site_name) return cache.get_data() @@ -140,7 +140,7 @@ class SiteSyncModel: Union[dict[str, Any], None]: Site icon definition. """ - if not project_name or not self.is_site_sync_enabled(project_name): + if not project_name or not self.is_sitesync_enabled(project_name): return None active_site = self.get_active_site(project_name) return self._get_site_icon_def(project_name, active_site) @@ -155,14 +155,14 @@ class SiteSyncModel: Union[dict[str, Any], None]: Site icon definition. 
""" - if not project_name or not self.is_site_sync_enabled(project_name): + if not project_name or not self.is_sitesync_enabled(project_name): return None remote_site = self.get_remote_site(project_name) return self._get_site_icon_def(project_name, remote_site) def _get_site_icon_def(self, project_name, site_name): # use different icon for studio even if provider is 'local_drive' - if site_name == self._site_sync_addon.DEFAULT_SITE: + if site_name == self._sitesync_addon.DEFAULT_SITE: provider = "studio" else: provider = self._get_provider_for_site(project_name, site_name) @@ -179,7 +179,7 @@ class SiteSyncModel: dict[str, tuple[int, int]] """ - if not self.is_site_sync_enabled(project_name): + if not self.is_sitesync_enabled(project_name): return { version_id: _default_version_availability() for version_id in version_ids @@ -217,7 +217,7 @@ class SiteSyncModel: dict[str, tuple[float, float]] """ - if not self.is_site_sync_enabled(project_name): + if not self.is_sitesync_enabled(project_name): return { repre_id: _default_repre_status() for repre_id in representation_ids @@ -242,7 +242,7 @@ class SiteSyncModel: output[repre_id] = repre_cache.get_data() return output - def get_site_sync_action_items(self, project_name, representation_ids): + def get_sitesync_action_items(self, project_name, representation_ids): """ Args: @@ -253,7 +253,7 @@ class SiteSyncModel: list[ActionItem]: Actions that can be shown in loader. """ - if not self.is_site_sync_enabled(project_name): + if not self.is_sitesync_enabled(project_name): return [] repres_status = self.get_representations_sync_status( @@ -289,7 +289,7 @@ class SiteSyncModel: return action_items - def is_site_sync_action(self, identifier): + def is_sitesync_action(self, identifier): """Should be `identifier` handled by SiteSync. Args: @@ -353,22 +353,22 @@ class SiteSyncModel: ) elif identifier == REMOVE_IDENTIFIER: - self._site_sync_addon.remove_site( + self._sitesync_addon.remove_site( project_name, repre_id, active_site, remove_local_files=True ) - def _is_site_sync_addon_enabled(self): + def _is_sitesync_addon_enabled(self): """ Returns: bool: Site sync addon is enabled. """ - if self._site_sync_addon is None: + if self._sitesync_addon is None: return False - return self._site_sync_addon.enabled + return self._sitesync_addon.enabled def _get_provider_for_site(self, project_name, site_name): """Provider for a site. @@ -381,9 +381,9 @@ class SiteSyncModel: Union[str, None]: Provider name. 
""" - if not self._is_site_sync_addon_enabled(): + if not self._is_sitesync_addon_enabled(): return None - return self._site_sync_addon.get_provider_for_site( + return self._sitesync_addon.get_provider_for_site( project_name, site_name ) @@ -398,7 +398,7 @@ class SiteSyncModel: return None if self._site_icons is None: - self._site_icons = self._site_sync_addon.get_site_icons() + self._site_icons = self._sitesync_addon.get_site_icons() return self._site_icons.get(provider) def _refresh_version_availability(self, project_name, version_ids): @@ -406,7 +406,7 @@ class SiteSyncModel: return project_cache = self._version_availability_cache[project_name] - avail_by_id = self._site_sync_addon.get_version_availability( + avail_by_id = self._sitesync_addon.get_version_availability( project_name, version_ids, self.get_active_site(project_name), @@ -425,7 +425,7 @@ class SiteSyncModel: return project_cache = self._repre_status_cache[project_name] status_by_repre_id = ( - self._site_sync_addon.get_representations_sync_state( + self._sitesync_addon.get_representations_sync_state( project_name, representation_ids, self.get_active_site(project_name), @@ -496,7 +496,7 @@ class SiteSyncModel: ) def _add_site(self, project_name, repre_entity, site_name, product_type): - self._site_sync_addon.add_site( + self._sitesync_addon.add_site( project_name, repre_entity["id"], site_name, force=True ) @@ -513,7 +513,7 @@ class SiteSyncModel: try: print("Adding {} to linked representation: {}".format( site_name, link_repre_id)) - self._site_sync_addon.add_site( + self._sitesync_addon.add_site( project_name, link_repre_id, site_name, diff --git a/client/ayon_core/tools/loader/ui/products_model.py b/client/ayon_core/tools/loader/ui/products_model.py index c51172849a..41342ba0df 100644 --- a/client/ayon_core/tools/loader/ui/products_model.py +++ b/client/ayon_core/tools/loader/ui/products_model.py @@ -73,7 +73,7 @@ class ProductsModel(QtGui.QStandardItemModel): published_time_col = column_labels.index("Time") folders_label_col = column_labels.index("Folder") in_scene_col = column_labels.index("In scene") - site_sync_avail_col = column_labels.index("Availability") + sitesync_avail_col = column_labels.index("Availability") def __init__(self, controller): super(ProductsModel, self).__init__() diff --git a/client/ayon_core/tools/loader/ui/products_widget.py b/client/ayon_core/tools/loader/ui/products_widget.py index 3025ec18bd..d9f027153e 100644 --- a/client/ayon_core/tools/loader/ui/products_widget.py +++ b/client/ayon_core/tools/loader/ui/products_widget.py @@ -139,9 +139,9 @@ class ProductsWidget(QtWidgets.QWidget): products_view.setItemDelegateForColumn( products_model.in_scene_col, in_scene_delegate) - site_sync_delegate = SiteSyncDelegate() + sitesync_delegate = SiteSyncDelegate() products_view.setItemDelegateForColumn( - products_model.site_sync_avail_col, site_sync_delegate) + products_model.sitesync_avail_col, sitesync_delegate) main_layout = QtWidgets.QHBoxLayout(self) main_layout.setContentsMargins(0, 0, 0, 0) @@ -176,7 +176,7 @@ class ProductsWidget(QtWidgets.QWidget): self._version_delegate = version_delegate self._time_delegate = time_delegate self._in_scene_delegate = in_scene_delegate - self._site_sync_delegate = site_sync_delegate + self._sitesync_delegate = sitesync_delegate self._selected_project_name = None self._selected_folder_ids = set() @@ -192,8 +192,8 @@ class ProductsWidget(QtWidgets.QWidget): products_model.in_scene_col, not controller.is_loaded_products_supported() ) - 
self._set_site_sync_visibility( - self._controller.is_site_sync_enabled() + self._set_sitesync_visibility( + self._controller.is_sitesync_enabled() ) def set_name_filter(self, name): @@ -229,10 +229,10 @@ class ProductsWidget(QtWidgets.QWidget): def refresh(self): self._refresh_model() - def _set_site_sync_visibility(self, site_sync_enabled): + def _set_sitesync_visibility(self, sitesync_enabled): self._products_view.setColumnHidden( - self._products_model.site_sync_avail_col, - not site_sync_enabled + self._products_model.sitesync_avail_col, + not sitesync_enabled ) def _fill_version_editor(self): @@ -395,10 +395,10 @@ class ProductsWidget(QtWidgets.QWidget): def _on_folders_selection_change(self, event): project_name = event["project_name"] - site_sync_enabled = self._controller.is_site_sync_enabled( + sitesync_enabled = self._controller.is_sitesync_enabled( project_name ) - self._set_site_sync_visibility(site_sync_enabled) + self._set_sitesync_visibility(sitesync_enabled) self._selected_project_name = project_name self._selected_folder_ids = event["folder_ids"] self._refresh_model() diff --git a/client/ayon_core/tools/loader/ui/repres_widget.py b/client/ayon_core/tools/loader/ui/repres_widget.py index 3b6b8f94bf..d19ad306a3 100644 --- a/client/ayon_core/tools/loader/ui/repres_widget.py +++ b/client/ayon_core/tools/loader/ui/repres_widget.py @@ -307,8 +307,8 @@ class RepresentationsWidget(QtWidgets.QWidget): self._repre_model = repre_model self._repre_proxy_model = repre_proxy_model - self._set_site_sync_visibility( - self._controller.is_site_sync_enabled() + self._set_sitesync_visibility( + self._controller.is_sitesync_enabled() ) self._set_multiple_folders_selected(False) @@ -320,19 +320,19 @@ class RepresentationsWidget(QtWidgets.QWidget): def _on_project_change(self, event): self._selected_project_name = event["project_name"] - site_sync_enabled = self._controller.is_site_sync_enabled( + sitesync_enabled = self._controller.is_sitesync_enabled( self._selected_project_name ) - self._set_site_sync_visibility(site_sync_enabled) + self._set_sitesync_visibility(sitesync_enabled) - def _set_site_sync_visibility(self, site_sync_enabled): + def _set_sitesync_visibility(self, sitesync_enabled): self._repre_view.setColumnHidden( self._repre_model.active_site_column, - not site_sync_enabled + not sitesync_enabled ) self._repre_view.setColumnHidden( self._repre_model.remote_site_column, - not site_sync_enabled + not sitesync_enabled ) def _set_multiple_folders_selected(self, selected_multiple_folders): diff --git a/client/ayon_core/tools/publisher/widgets/create_context_widgets.py b/client/ayon_core/tools/publisher/widgets/create_context_widgets.py index 61223bbe75..235a778d0f 100644 --- a/client/ayon_core/tools/publisher/widgets/create_context_widgets.py +++ b/client/ayon_core/tools/publisher/widgets/create_context_widgets.py @@ -1,4 +1,4 @@ -from qtpy import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore from ayon_core.lib.events import QueuedEventSystem from ayon_core.tools.utils import PlaceholderLineEdit, GoToCurrentButton diff --git a/client/ayon_core/tools/publisher/widgets/folders_dialog.py b/client/ayon_core/tools/publisher/widgets/folders_dialog.py index 03336e10a6..8dce7aba3a 100644 --- a/client/ayon_core/tools/publisher/widgets/folders_dialog.py +++ b/client/ayon_core/tools/publisher/widgets/folders_dialog.py @@ -1,4 +1,4 @@ -from qtpy import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets from ayon_core.lib.events import QueuedEventSystem from ayon_core.tools.utils 
import PlaceholderLineEdit, FoldersWidget diff --git a/client/ayon_core/tools/publisher/widgets/publish_frame.py b/client/ayon_core/tools/publisher/widgets/publish_frame.py index d423f97047..ee65c69c19 100644 --- a/client/ayon_core/tools/publisher/widgets/publish_frame.py +++ b/client/ayon_core/tools/publisher/widgets/publish_frame.py @@ -1,7 +1,3 @@ -import os -import json -import time - from qtpy import QtWidgets, QtCore from .widgets import ( diff --git a/client/ayon_core/tools/publisher/widgets/tasks_model.py b/client/ayon_core/tools/publisher/widgets/tasks_model.py index e36de80fcf..78b1f23b17 100644 --- a/client/ayon_core/tools/publisher/widgets/tasks_model.py +++ b/client/ayon_core/tools/publisher/widgets/tasks_model.py @@ -1,4 +1,4 @@ -from qtpy import QtWidgets, QtCore, QtGui +from qtpy import QtCore, QtGui from ayon_core.style import get_default_entity_icon_color from ayon_core.tools.utils import get_qt_icon diff --git a/client/ayon_core/tools/pyblish_pype/util.py b/client/ayon_core/tools/pyblish_pype/util.py index 8126637060..09a370c6e4 100644 --- a/client/ayon_core/tools/pyblish_pype/util.py +++ b/client/ayon_core/tools/pyblish_pype/util.py @@ -7,8 +7,6 @@ from __future__ import ( import os import sys -import numbers -import copy import collections from qtpy import QtCore @@ -39,7 +37,7 @@ def defer(delay, func): This aids in keeping the GUI responsive, but complicates logic when producing tests. To combat this, the environment variable ensures - that every operation is synchonous. + that every operation is synchronous. Arguments: delay (float): Delay multiplier; default 1, 0 means no delay diff --git a/client/ayon_core/tools/sceneinventory/control.py b/client/ayon_core/tools/sceneinventory/control.py index 77f4d60b22..592113455c 100644 --- a/client/ayon_core/tools/sceneinventory/control.py +++ b/client/ayon_core/tools/sceneinventory/control.py @@ -28,7 +28,7 @@ class SceneInventoryController: self._current_folder_id = None self._current_folder_set = False - self._site_sync_model = SiteSyncModel(self) + self._sitesync_model = SiteSyncModel(self) # Switch dialog requirements self._hierarchy_model = HierarchyModel(self) self._event_system = self._create_event_system() @@ -47,7 +47,7 @@ class SceneInventoryController: self._current_folder_id = None self._current_folder_set = False - self._site_sync_model.reset() + self._sitesync_model.reset() self._hierarchy_model.reset() def get_current_context(self): @@ -89,22 +89,22 @@ class SceneInventoryController: return [] # Site Sync methods - def is_sync_server_enabled(self): - return self._site_sync_model.is_sync_server_enabled() + def is_sitesync_enabled(self): + return self._sitesync_model.is_sitesync_enabled() def get_sites_information(self): - return self._site_sync_model.get_sites_information() + return self._sitesync_model.get_sites_information() def get_site_provider_icons(self): - return self._site_sync_model.get_site_provider_icons() + return self._sitesync_model.get_site_provider_icons() def get_representations_site_progress(self, representation_ids): - return self._site_sync_model.get_representations_site_progress( + return self._sitesync_model.get_representations_site_progress( representation_ids ) def resync_representations(self, representation_ids, site_type): - return self._site_sync_model.resync_representations( + return self._sitesync_model.resync_representations( representation_ids, site_type ) diff --git a/client/ayon_core/tools/sceneinventory/models/__init__.py 
b/client/ayon_core/tools/sceneinventory/models/__init__.py index c861d3c1a0..f840a45aa8 100644 --- a/client/ayon_core/tools/sceneinventory/models/__init__.py +++ b/client/ayon_core/tools/sceneinventory/models/__init__.py @@ -1,4 +1,4 @@ -from .site_sync import SiteSyncModel +from .sitesync import SiteSyncModel __all__ = ( diff --git a/client/ayon_core/tools/sceneinventory/models/site_sync.py b/client/ayon_core/tools/sceneinventory/models/sitesync.py similarity index 75% rename from client/ayon_core/tools/sceneinventory/models/site_sync.py rename to client/ayon_core/tools/sceneinventory/models/sitesync.py index 7f09f2b25b..1a1f08bf02 100644 --- a/client/ayon_core/tools/sceneinventory/models/site_sync.py +++ b/client/ayon_core/tools/sceneinventory/models/sitesync.py @@ -9,30 +9,30 @@ class SiteSyncModel: def __init__(self, controller): self._controller = controller - self._sync_server_module = NOT_SET - self._sync_server_enabled = None + self._sitesync_addon = NOT_SET + self._sitesync_enabled = None self._active_site = NOT_SET self._remote_site = NOT_SET self._active_site_provider = NOT_SET self._remote_site_provider = NOT_SET def reset(self): - self._sync_server_module = NOT_SET - self._sync_server_enabled = None + self._sitesync_addon = NOT_SET + self._sitesync_enabled = None self._active_site = NOT_SET self._remote_site = NOT_SET self._active_site_provider = NOT_SET self._remote_site_provider = NOT_SET - def is_sync_server_enabled(self): + def is_sitesync_enabled(self): """Site sync is enabled. Returns: bool: Is enabled or not. """ - self._cache_sync_server_module() - return self._sync_server_enabled + self._cache_sitesync_addon() + return self._sitesync_enabled def get_site_provider_icons(self): """Icon paths per provider. @@ -41,10 +41,10 @@ class SiteSyncModel: dict[str, str]: Path by provider name. 
""" - if not self.is_sync_server_enabled(): + if not self.is_sitesync_enabled(): return {} - site_sync_addon = self._get_sync_server_module() - return site_sync_addon.get_site_icons() + sitesync_addon = self._get_sitesync_addon() + return sitesync_addon.get_site_icons() def get_sites_information(self): return { @@ -65,11 +65,11 @@ class SiteSyncModel: } for repre_id in representation_ids } - if not self.is_sync_server_enabled(): + if not self.is_sitesync_enabled(): return output project_name = self._controller.get_current_project_name() - site_sync = self._get_sync_server_module() + sitesync_addon = self._get_sitesync_addon() repre_entities = ayon_api.get_representations( project_name, representation_ids ) @@ -78,7 +78,7 @@ class SiteSyncModel: for repre_entity in repre_entities: repre_output = output[repre_entity["id"]] - result = site_sync.get_progress_for_repre( + result = sitesync_addon.get_progress_for_repre( repre_entity, active_site, remote_site ) repre_output["active_site"] = result[active_site] @@ -95,7 +95,7 @@ class SiteSyncModel: """ project_name = self._controller.get_current_project_name() - site_sync = self._get_sync_server_module() + sitesync_addon = self._get_sitesync_addon() active_site = self._get_active_site() remote_site = self._get_remote_site() progress = self.get_representations_site_progress( @@ -115,22 +115,22 @@ class SiteSyncModel: site = remote_site if check_progress == 1: - site_sync.add_site( + sitesync_addon.add_site( project_name, repre_id, site, force=True ) - def _get_sync_server_module(self): - self._cache_sync_server_module() - return self._sync_server_module + def _get_sitesync_addon(self): + self._cache_sitesync_addon() + return self._sitesync_addon - def _cache_sync_server_module(self): - if self._sync_server_module is not NOT_SET: - return self._sync_server_module + def _cache_sitesync_addon(self): + if self._sitesync_addon is not NOT_SET: + return self._sitesync_addon manager = AddonsManager() - site_sync = manager.get("sync_server") - sync_enabled = site_sync is not None and site_sync.enabled - self._sync_server_module = site_sync - self._sync_server_enabled = sync_enabled + sitesync_addon = manager.get("sitesync") + sync_enabled = sitesync_addon is not None and sitesync_addon.enabled + self._sitesync_addon = sitesync_addon + self._sitesync_enabled = sync_enabled def _get_active_site(self): if self._active_site is NOT_SET: @@ -157,19 +157,19 @@ class SiteSyncModel: remote_site = None active_site_provider = None remote_site_provider = None - if self.is_sync_server_enabled(): - site_sync = self._get_sync_server_module() + if self.is_sitesync_enabled(): + sitesync_addon = self._get_sitesync_addon() project_name = self._controller.get_current_project_name() - active_site = site_sync.get_active_site(project_name) - remote_site = site_sync.get_remote_site(project_name) + active_site = sitesync_addon.get_active_site(project_name) + remote_site = sitesync_addon.get_remote_site(project_name) active_site_provider = "studio" remote_site_provider = "studio" if active_site != "studio": - active_site_provider = site_sync.get_provider_for_site( + active_site_provider = sitesync_addon.get_provider_for_site( project_name, active_site ) if remote_site != "studio": - remote_site_provider = site_sync.get_provider_for_site( + remote_site_provider = sitesync_addon.get_provider_for_site( project_name, remote_site ) diff --git a/client/ayon_core/tools/sceneinventory/view.py b/client/ayon_core/tools/sceneinventory/view.py index d576bdc139..5cbd4daf70 100644 --- 
a/client/ayon_core/tools/sceneinventory/view.py +++ b/client/ayon_core/tools/sceneinventory/view.py @@ -311,9 +311,9 @@ class SceneInventoryView(QtWidgets.QTreeView): menu.addAction(remove_action) - self._handle_sync_server(menu, repre_ids) + self._handle_sitesync(menu, repre_ids) - def _handle_sync_server(self, menu, repre_ids): + def _handle_sitesync(self, menu, repre_ids): """Adds actions for download/upload when SyncServer is enabled Args: @@ -324,7 +324,7 @@ class SceneInventoryView(QtWidgets.QTreeView): (OptionMenu) """ - if not self._controller.is_sync_server_enabled(): + if not self._controller.is_sitesync_enabled(): return menu.addSeparator() diff --git a/client/ayon_core/tools/sceneinventory/window.py b/client/ayon_core/tools/sceneinventory/window.py index 9584524edd..555db3a17c 100644 --- a/client/ayon_core/tools/sceneinventory/window.py +++ b/client/ayon_core/tools/sceneinventory/window.py @@ -70,7 +70,7 @@ class SceneInventoryWindow(QtWidgets.QDialog): view = SceneInventoryView(controller, self) view.setModel(proxy) - sync_enabled = controller.is_sync_server_enabled() + sync_enabled = controller.is_sitesync_enabled() view.setColumnHidden(model.active_site_col, not sync_enabled) view.setColumnHidden(model.remote_site_col, not sync_enabled) diff --git a/client/ayon_core/tools/utils/lib.py b/client/ayon_core/tools/utils/lib.py index 4b7ca5425e..d56b370d75 100644 --- a/client/ayon_core/tools/utils/lib.py +++ b/client/ayon_core/tools/utils/lib.py @@ -7,7 +7,6 @@ from qtpy import QtWidgets, QtCore, QtGui import qtawesome from ayon_core.style import ( - get_default_entity_icon_color, get_objected_colors, get_app_icon_path, ) diff --git a/client/ayon_core/tools/utils/models.py b/client/ayon_core/tools/utils/models.py index 92bed16e98..9b32cc5710 100644 --- a/client/ayon_core/tools/utils/models.py +++ b/client/ayon_core/tools/utils/models.py @@ -2,7 +2,7 @@ import re import logging import qtpy -from qtpy import QtCore, QtGui +from qtpy import QtCore log = logging.getLogger(__name__) diff --git a/client/ayon_core/vendor/python/common/pysync.py b/client/ayon_core/vendor/python/common/pysync.py deleted file mode 100644 index 14a6dda34c..0000000000 --- a/client/ayon_core/vendor/python/common/pysync.py +++ /dev/null @@ -1,216 +0,0 @@ -#!/usr/local/bin/python3 -# https://github.com/snullp/pySync/blob/master/pySync.py - -import sys -import shutil -import os -import time -import configparser -from os.path import ( - getsize, - getmtime, - isfile, - isdir, - join, - abspath, - expanduser, - realpath -) -import logging - -log = logging.getLogger(__name__) - -ignoreFiles = ("Thumbs.db", ".DS_Store") - -# this feature is not yet implemented -ignorePaths = [] - -if os.name == 'nt': - # msvcrt can't function correctly in IDLE - if 'idlelib.run' in sys.modules: - print("Please don't run this script in IDLE.") - sys.exit(0) - import msvcrt - - def flush_input(str, set=None): - if not set: - while msvcrt.kbhit(): - ch = msvcrt.getch() - if ch == '\xff': - print("msvcrt is broken, this is weird.") - sys.exit(0) - return input(str) - else: - return set -else: - import select - - def flush_input(str, set=None): - if not set: - while len(select.select([sys.stdin.fileno()], [], [], 0.0)[0]) > 0: - os.read(sys.stdin.fileno(), 4096) - return input(str) - else: - return set - - -def compare(fa, fb, options_input=[]): - if isfile(fa) == isfile(fb): - if isdir(fa): - walktree(fa, fb, options_input) - elif isfile(fa): - if getsize(fa) != getsize(fb) \ - or int(getmtime(fa)) != int(getmtime(fb)): - 
log.info(str((fa, ': size=', getsize(fa), 'mtime=', - time.asctime(time.localtime(getmtime(fa)))))) - log.info(str((fb, ': size=', getsize(fb), 'mtime=', - time.asctime(time.localtime(getmtime(fb)))))) - if getmtime(fa) > getmtime(fb): - act = '>' - else: - act = '<' - - set = [i for i in options_input if i in [">", "<"]][0] - - s = flush_input('What to do?(>,<,r,n)[' + act + ']', set=set) - if len(s) > 0: - act = s[0] - if act == '>': - shutil.copy2(fa, fb) - elif act == '<': - shutil.copy2(fb, fa) - elif act == 'r': - if isdir(fa): - shutil.rmtree(fa) - elif isfile(fa): - os.remove(fa) - else: - log.info(str(('Remove: Skipping', fa))) - if isdir(fb): - shutil.rmtree(fb) - elif isfile(fb): - os.remove(fb) - else: - log.info(str(('Remove: Skipping', fb))) - - else: - log.debug(str(('Compare: Skipping non-dir and non-file', fa))) - else: - log.error(str(('Error:', fa, ',', fb, 'have different file type'))) - - -def copy(fa, fb, options_input=[]): - set = [i for i in options_input if i in ["y"]][0] - s = flush_input('Copy ' + fa + ' to another side?(r,y,n)[y]', set=set) - if len(s) > 0: - act = s[0] - else: - act = 'y' - if act == 'y': - if isdir(fa): - shutil.copytree(fa, fb) - elif isfile(fa): - shutil.copy2(fa, fb) - else: - log.debug(str(('Copy: Skipping ', fa))) - elif act == 'r': - if isdir(fa): - shutil.rmtree(fa) - elif isfile(fa): - os.remove(fa) - else: - log.debug(str(('Remove: Skipping ', fa))) - - -stoentry = [] -tarentry = [] - - -def walktree(source, target, options_input=[]): - srclist = os.listdir(source) - tarlist = os.listdir(target) - if '!sync' in srclist: - return - if '!sync' in tarlist: - return - # files in source dir... - for f in srclist: - if f in ignoreFiles: - continue - spath = join(source, f) - tpath = join(target, f) - if spath in ignorePaths: - continue - if spath in stoentry: - # just in case target also have this one - if f in tarlist: - del tarlist[tarlist.index(f)] - continue - - # if also exists in target dir - if f in tarlist: - del tarlist[tarlist.index(f)] - compare(spath, tpath, options_input) - - # exists in source dir only - else: - copy(spath, tpath, options_input) - - # exists in target dir only - set = [i for i in options_input if i in ["<"]] - - for f in tarlist: - if f in ignoreFiles: - continue - spath = join(source, f) - tpath = join(target, f) - if tpath in ignorePaths: - continue - if tpath in tarentry: - continue - if set: - copy(tpath, spath, options_input) - else: - print("REMOVING: {}".format(f)) - if os.path.isdir(tpath): - shutil.rmtree(tpath) - else: - os.remove(tpath) - print("REMOVING: {}".format(f)) - - -if __name__ == '__main__': - stoconf = configparser.RawConfigParser() - tarconf = configparser.RawConfigParser() - stoconf.read("pySync.ini") - tarconf.read(expanduser("~/.pysync")) - stoname = stoconf.sections()[0] - tarname = tarconf.sections()[0] - - # calculate storage's base folder - if stoconf.has_option(stoname, 'BASE'): - stobase = abspath(stoconf.get(stoname, 'BASE')) - stoconf.remove_option(stoname, 'BASE') - else: - stobase = os.getcwd() - - # same, for target's base folder - if tarconf.has_option(tarname, 'BASE'): - tarbase = abspath(tarconf.get(tarname, 'BASE')) - tarconf.remove_option(tarname, 'BASE') - else: - tarbase = expanduser('~/') - - print("Syncing between", stoname, "and", tarname) - sto_content = {x: realpath(join(stobase, stoconf.get(stoname, x))) - for x in stoconf.options(stoname)} - tar_content = {x: realpath(join(tarbase, tarconf.get(tarname, x))) - for x in tarconf.options(tarname)} - stoentry = 
[sto_content[x] for x in sto_content] - tarentry = [tar_content[x] for x in tar_content] - - for folder in sto_content: - if folder in tar_content: - print('Processing', folder) - walktree(sto_content[folder], tar_content[folder], options_input) - print("Done.") diff --git a/client/ayon_core/version.py b/client/ayon_core/version.py index f3ad9713d5..a60de0493a 100644 --- a/client/ayon_core/version.py +++ b/client/ayon_core/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring AYON core addon version.""" -__version__ = "0.3.0-dev.1" +__version__ = "0.3.1-dev.1" diff --git a/client/ayon_core/widgets/__init__.py b/client/ayon_core/widgets/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/client/ayon_core/widgets/password_dialog.py b/client/ayon_core/widgets/password_dialog.py deleted file mode 100644 index a4c50128ff..0000000000 --- a/client/ayon_core/widgets/password_dialog.py +++ /dev/null @@ -1,33 +0,0 @@ -# TODO remove - kept for kitsu addon which imported it -from qtpy import QtWidgets, QtCore, QtGui - - -class PressHoverButton(QtWidgets.QPushButton): - """ - Deprecated: - Use `openpype.tools.utils.PressHoverButton` instead. - """ - _mouse_pressed = False - _mouse_hovered = False - change_state = QtCore.Signal(bool) - - def mousePressEvent(self, event): - self._mouse_pressed = True - self._mouse_hovered = True - self.change_state.emit(self._mouse_hovered) - super(PressHoverButton, self).mousePressEvent(event) - - def mouseReleaseEvent(self, event): - self._mouse_pressed = False - self._mouse_hovered = False - self.change_state.emit(self._mouse_hovered) - super(PressHoverButton, self).mouseReleaseEvent(event) - - def mouseMoveEvent(self, event): - mouse_pos = self.mapFromGlobal(QtGui.QCursor.pos()) - under_mouse = self.rect().contains(mouse_pos) - if under_mouse != self._mouse_hovered: - self._mouse_hovered = under_mouse - self.change_state.emit(self._mouse_hovered) - - super(PressHoverButton, self).mouseMoveEvent(event) diff --git a/client/pyproject.toml b/client/pyproject.toml index 7b4329a31a..1a0ad7e5f2 100644 --- a/client/pyproject.toml +++ b/client/pyproject.toml @@ -4,19 +4,19 @@ description="AYON core addon." [tool.poetry.dependencies] python = ">=3.9.1,<3.10" -aiohttp_json_rpc = "*" # TVPaint server -aiohttp-middlewares = "^2.0.0" -wsrpc_aiohttp = "^3.1.1" # websocket server -Click = "^8" clique = "1.6.*" jsonschema = "^2.6.0" pyblish-base = "^1.8.11" -pynput = "^1.7.2" # Timers manager - TODO remove speedcopy = "^2.1" six = "^1.15" qtawesome = "0.7.3" [ayon.runtimeDependencies] +aiohttp_json_rpc = "*" # TVPaint server +aiohttp-middlewares = "^2.0.0" +wsrpc_aiohttp = "^3.1.1" # websocket server +Click = "^8" OpenTimelineIO = "0.14.1" opencolorio = "2.2.1" Pillow = "9.5.0" +pynput = "^1.7.2" # Timers manager - TODO remove diff --git a/package.py b/package.py index 470bbf256b..79450d029f 100644 --- a/package.py +++ b/package.py @@ -1,6 +1,6 @@ name = "core" title = "Core" -version = "0.3.0-dev.1" +version = "0.3.1-dev.1" client_dir = "ayon_core" diff --git a/pyproject.toml b/pyproject.toml index ee124ddc2d..3c9ff4ea0a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ [tool.poetry] name = "ayon-core" -version = "0.3.0" +version = "0.3.1" description = "" authors = ["Ynput Team "] readme = "README.md" @@ -92,7 +92,7 @@ line-ending = "auto" [tool.codespell] # Ignore words that are not in the dictionary. 
-ignore-words-list = "ayon,ynput" +ignore-words-list = "ayon,ynput,parms,parm,hda" skip = "./.*,./package/*,*/vendor/*,*/unreal/integration/*,*/aftereffects/api/extension/js/libs/*" count = true quiet-level = 3 diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index 9b5f3ae571..e61bf6986b 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -9,7 +9,7 @@ from ayon_server.settings import ( task_types_enum, ) -from ayon_server.types import ColorRGB_uint8, ColorRGBA_uint8 +from ayon_server.types import ColorRGBA_uint8 class ValidateBaseModel(BaseSettingsModel): @@ -221,7 +221,12 @@ class OIIOToolArgumentsModel(BaseSettingsModel): class ExtractOIIOTranscodeOutputModel(BaseSettingsModel): _layout = "expanded" - name: str = SettingsField("", title="Name") + name: str = SettingsField( + "", + title="Name", + description="Output name (no space)", + regex=r"[a-zA-Z0-9_]([a-zA-Z0-9_\.\-]*[a-zA-Z0-9_])?$", + ) extension: str = SettingsField("", title="Extension") transcoding_type: str = SettingsField( "colorspace", @@ -424,7 +429,7 @@ class ExtractReviewOutputDefModel(BaseSettingsModel): title="Scale pixel aspect", description=( "Rescale input when it's pixel aspect ratio is not 1." - " Usefull for anamorph reviews." + " Useful for anamorphic reviews." ) ) bg_color: ColorRGBA_uint8 = SettingsField( diff --git a/server_addon/applications/server/__init__.py b/server_addon/applications/server/__init__.py index d5c2de3df3..2668589cbe 100644 --- a/server_addon/applications/server/__init__.py +++ b/server_addon/applications/server/__init__.py @@ -3,6 +3,7 @@ import json import copy from ayon_server.addons import BaseServerAddon, AddonLibrary +from ayon_server.entities.core import attribute_library from ayon_server.lib.postgres import Postgres from .version import __version__ @@ -118,9 +119,28 @@ class ApplicationsAddon(BaseServerAddon): ) async def setup(self): - need_restart = await self.create_applications_attribute() + need_restart = await self.create_required_attributes() if need_restart: self.request_server_restart() + await self._update_enums() + + def _get_applications_def(self): + return { + "name": "applications", + "type": "list_of_strings", + "title": "Applications", + "scope": ["project"], + "enum":[], + } + + def _get_tools_def(self): + return { + "name": "tools", + "type": "list_of_strings", + "title": "Tools", + "scope": ["project", "folder", "task"], + "enum":[], + } async def create_applications_attribute(self) -> bool: """Make sure there are required attributes which ftrack addon needs. @@ -129,6 +149,73 @@ class ApplicationsAddon(BaseServerAddon): bool: 'True' if an attribute was created or updated. """ + need_restart = await self.create_required_attributes() + await self._update_enums() + return need_restart + + async def create_required_attributes(self) -> bool: + """Make sure there are required 'applications' and 'tools' attributes. + This only checks for the existence of the attributes, it does not populate + them with any data. When an attribute is added, server needs to be restarted, + while adding enum data to the attribute does not require a restart. + Returns: + bool: 'True' if an attribute was created or updated. 
+ """ + + # keep track of the last attribute position (for adding new attributes) + apps_attribute_data = self._get_applications_def() + tools_attribute_data = self._get_tools_def() + + apps_attrib_name = apps_attribute_data["name"] + tools_attrib_name = tools_attribute_data["name"] + + async with Postgres.acquire() as conn, conn.transaction(): + query = "SELECT BOOL_OR(name = 'applications') AS has_applications, BOOL_OR(name = 'tools') AS has_tools FROM attributes;" + result = (await conn.fetch(query))[0] + + attributes_to_create = {} + if not result["has_applications"]: + attributes_to_create[apps_attrib_name] = { + "scope": apps_attribute_data["scope"], + "data": { + "title": apps_attribute_data["title"], + "type": apps_attribute_data["type"], + "enum": [], + } + } + + if not result["has_tools"]: + attributes_to_create[tools_attrib_name] = { + "scope": tools_attribute_data["scope"], + "data": { + "title": tools_attribute_data["title"], + "type": tools_attribute_data["type"], + "enum": [], + }, + } + + needs_restart = False + # when any of the required attributes are not present, add them + # and return 'True' to indicate that server needs to be restarted + for name, payload in attributes_to_create.items(): + insert_query = "INSERT INTO attributes (name, scope, data, position) VALUES ($1, $2, $3, (SELECT COALESCE(MAX(position), 0) + 1 FROM attributes)) ON CONFLICT DO NOTHING" + await conn.execute( + insert_query, + name, + payload["scope"], + payload["data"], + ) + needs_restart = True + + return needs_restart + + async def _update_enums(self): + """Updates applications and tools enums based on the addon settings. + This method is called when the addon is started (after we are sure that the + 'applications' and 'tools' attributes exist) and when the addon settings are + updated (using on_settings_updated method). 
+ """ + instance = AddonLibrary.getinstance() app_defs = instance.data.get(self.name) all_applications = [] @@ -148,33 +235,32 @@ class ApplicationsAddon(BaseServerAddon): merge_groups(all_applications, app_groups) merge_groups(all_tools, studio_settings["tool_groups"]) - query = "SELECT name, position, scope, data from public.attributes" - apps_attrib_name = "applications" tools_attrib_name = "tools" apps_enum = get_enum_items_from_groups(all_applications) tools_enum = get_enum_items_from_groups(all_tools) + apps_attribute_data = { "type": "list_of_strings", "title": "Applications", - "enum": apps_enum + "enum": apps_enum, } tools_attribute_data = { "type": "list_of_strings", "title": "Tools", - "enum": tools_enum + "enum": tools_enum, } + apps_scope = ["project"] tools_scope = ["project", "folder", "task"] - apps_match_position = None apps_matches = False - tools_match_position = None tools_matches = False - position = 1 - async for row in Postgres.iterate(query): - position += 1 + + async for row in Postgres.iterate( + "SELECT name, position, scope, data from public.attributes" + ): if row["name"] == apps_attrib_name: # Check if scope is matching ftrack addon requirements if ( @@ -182,7 +268,6 @@ class ApplicationsAddon(BaseServerAddon): and row["data"].get("enum") == apps_enum ): apps_matches = True - apps_match_position = row["position"] elif row["name"] == tools_attrib_name: if ( @@ -190,45 +275,41 @@ class ApplicationsAddon(BaseServerAddon): and row["data"].get("enum") == tools_enum ): tools_matches = True - tools_match_position = row["position"] if apps_matches and tools_matches: - return False + return - postgre_query = "\n".join(( - "INSERT INTO public.attributes", - " (name, position, scope, data)", - "VALUES", - " ($1, $2, $3, $4)", - "ON CONFLICT (name)", - "DO UPDATE SET", - " scope = $3,", - " data = $4", - )) if not apps_matches: - # Reuse position from found attribute - if apps_match_position is None: - apps_match_position = position - position += 1 - await Postgres.execute( - postgre_query, - apps_attrib_name, - apps_match_position, + """ + UPDATE attributes SET + scope = $1, + data = $2 + WHERE + name = $3 + """, apps_scope, apps_attribute_data, + apps_attrib_name, ) if not tools_matches: - if tools_match_position is None: - tools_match_position = position - position += 1 - await Postgres.execute( - postgre_query, - tools_attrib_name, - tools_match_position, + """ + UPDATE attributes SET + scope = $1, + data = $2 + WHERE + name = $3 + """, tools_scope, tools_attribute_data, + tools_attrib_name, ) - return True + + # Reset attributes cache on server + await attribute_library.load() + + async def on_settings_changed(self, *args, **kwargs): + _ = args, kwargs + await self._update_enums() diff --git a/server_addon/applications/server/version.py b/server_addon/applications/server/version.py index 9cb17e7976..c11f861afb 100644 --- a/server_addon/applications/server/version.py +++ b/server_addon/applications/server/version.py @@ -1 +1 @@ -__version__ = "0.1.8" +__version__ = "0.1.9" diff --git a/server_addon/blender/server/settings/main.py b/server_addon/blender/server/settings/main.py index aed9b5632d..3cca22ae3b 100644 --- a/server_addon/blender/server/settings/main.py +++ b/server_addon/blender/server/settings/main.py @@ -6,7 +6,7 @@ from ayon_server.settings import ( from .imageio import BlenderImageIOModel from .publish_plugins import ( - PublishPuginsModel, + PublishPluginsModel, DEFAULT_BLENDER_PUBLISH_SETTINGS ) from .render_settings import ( @@ -47,8 +47,8 @@ class 
BlenderSettings(BaseSettingsModel): default_factory=TemplateWorkfileBaseOptions, title="Workfile Builder" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish Plugins" ) diff --git a/server_addon/blender/server/settings/publish_plugins.py b/server_addon/blender/server/settings/publish_plugins.py index c742fdc5bd..e998d7b057 100644 --- a/server_addon/blender/server/settings/publish_plugins.py +++ b/server_addon/blender/server/settings/publish_plugins.py @@ -66,7 +66,7 @@ class ExtractPlayblastModel(BaseSettingsModel): return validate_json_dict(value) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): ValidateCameraZeroKeyframe: ValidatePluginModel = SettingsField( default_factory=ValidatePluginModel, title="Validate Camera Zero Keyframe", diff --git a/server_addon/celaction/server/settings.py b/server_addon/celaction/server/settings.py index 9208948a07..afa9773477 100644 --- a/server_addon/celaction/server/settings.py +++ b/server_addon/celaction/server/settings.py @@ -42,7 +42,7 @@ class WorkfileModel(BaseSettingsModel): ) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectRenderPath: CollectRenderPathModel = SettingsField( default_factory=CollectRenderPathModel, title="Collect Render Path" @@ -57,8 +57,8 @@ class CelActionSettings(BaseSettingsModel): workfile: WorkfileModel = SettingsField( title="Workfile" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish plugins", ) diff --git a/server_addon/create_ayon_addons.py b/server_addon/create_ayon_addons.py index 9553980f5d..c2686199be 100644 --- a/server_addon/create_ayon_addons.py +++ b/server_addon/create_ayon_addons.py @@ -1,7 +1,6 @@ import os import sys import re -import json import shutil import argparse import zipfile @@ -220,7 +219,6 @@ def main( addons=None, ): current_dir = Path(os.path.dirname(os.path.abspath(__file__))) - root_dir = current_dir.parent create_zip = not skip_zip if output_dir: diff --git a/server_addon/flame/server/settings/create_plugins.py b/server_addon/flame/server/settings/create_plugins.py index 44fb8a2e91..2f17ec40c4 100644 --- a/server_addon/flame/server/settings/create_plugins.py +++ b/server_addon/flame/server/settings/create_plugins.py @@ -87,7 +87,7 @@ class CreateShotClipModel(BaseSettingsModel): ) -class CreatePuginsModel(BaseSettingsModel): +class CreatePluginsModel(BaseSettingsModel): CreateShotClip: CreateShotClipModel = SettingsField( default_factory=CreateShotClipModel, title="Create Shot Clip" diff --git a/server_addon/flame/server/settings/main.py b/server_addon/flame/server/settings/main.py index 047f5af287..c838ee9646 100644 --- a/server_addon/flame/server/settings/main.py +++ b/server_addon/flame/server/settings/main.py @@ -1,8 +1,8 @@ from ayon_server.settings import BaseSettingsModel, SettingsField from .imageio import FlameImageIOModel, DEFAULT_IMAGEIO_SETTINGS -from .create_plugins import CreatePuginsModel, DEFAULT_CREATE_SETTINGS -from .publish_plugins import PublishPuginsModel, DEFAULT_PUBLISH_SETTINGS +from .create_plugins import CreatePluginsModel, DEFAULT_CREATE_SETTINGS +from .publish_plugins import PublishPluginsModel, DEFAULT_PUBLISH_SETTINGS from .loader_plugins import LoaderPluginsModel, DEFAULT_LOADER_SETTINGS @@ -11,12 
+11,12 @@ class FlameSettings(BaseSettingsModel): default_factory=FlameImageIOModel, title="Color Management (ImageIO)" ) - create: CreatePuginsModel = SettingsField( - default_factory=CreatePuginsModel, + create: CreatePluginsModel = SettingsField( + default_factory=CreatePluginsModel, title="Create plugins" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish plugins" ) load: LoaderPluginsModel = SettingsField( diff --git a/server_addon/flame/server/settings/publish_plugins.py b/server_addon/flame/server/settings/publish_plugins.py index decb00fcfa..b34083b4e2 100644 --- a/server_addon/flame/server/settings/publish_plugins.py +++ b/server_addon/flame/server/settings/publish_plugins.py @@ -121,7 +121,7 @@ class IntegrateBatchGroupModel(BaseSettingsModel): ) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectTimelineInstances: CollectTimelineInstancesModel = SettingsField( default_factory=CollectTimelineInstancesModel, title="Collect Timeline Instances" diff --git a/server_addon/hiero/server/settings/loader_plugins.py b/server_addon/hiero/server/settings/loader_plugins.py index b5a81d1ae2..682f9fd2d9 100644 --- a/server_addon/hiero/server/settings/loader_plugins.py +++ b/server_addon/hiero/server/settings/loader_plugins.py @@ -15,7 +15,7 @@ class LoadClipModel(BaseSettingsModel): ) -class LoaderPuginsModel(BaseSettingsModel): +class LoaderPluginsModel(BaseSettingsModel): LoadClip: LoadClipModel = SettingsField( default_factory=LoadClipModel, title="Load Clip" diff --git a/server_addon/hiero/server/settings/main.py b/server_addon/hiero/server/settings/main.py index b170ecafb8..378af6a539 100644 --- a/server_addon/hiero/server/settings/main.py +++ b/server_addon/hiero/server/settings/main.py @@ -9,11 +9,11 @@ from .create_plugins import ( DEFAULT_CREATE_SETTINGS ) from .loader_plugins import ( - LoaderPuginsModel, + LoaderPluginsModel, DEFAULT_LOADER_PLUGINS_SETTINGS ) from .publish_plugins import ( - PublishPuginsModel, + PublishPluginsModel, DEFAULT_PUBLISH_PLUGIN_SETTINGS ) from .scriptsmenu import ( @@ -35,12 +35,12 @@ class HieroSettings(BaseSettingsModel): default_factory=CreatorPluginsSettings, title="Creator Plugins", ) - load: LoaderPuginsModel = SettingsField( - default_factory=LoaderPuginsModel, + load: LoaderPluginsModel = SettingsField( + default_factory=LoaderPluginsModel, title="Loader plugins" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish plugins" ) scriptsmenu: ScriptsmenuSettings = SettingsField( diff --git a/server_addon/hiero/server/settings/publish_plugins.py b/server_addon/hiero/server/settings/publish_plugins.py index c35c61c332..0e43d4ce3a 100644 --- a/server_addon/hiero/server/settings/publish_plugins.py +++ b/server_addon/hiero/server/settings/publish_plugins.py @@ -49,7 +49,7 @@ class ExtractReviewCutUpVideoModel(BaseSettingsModel): ) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectInstanceVersion: CollectInstanceVersionModel = SettingsField( default_factory=CollectInstanceVersionModel, title="Collect Instance Version" diff --git a/server_addon/maya/server/settings/publishers.py b/server_addon/maya/server/settings/publishers.py index ef23b77d19..27288053a2 100644 --- 
a/server_addon/maya/server/settings/publishers.py +++ b/server_addon/maya/server/settings/publishers.py @@ -299,6 +299,16 @@ class ExtractAlembicModel(BaseSettingsModel): families: list[str] = SettingsField( default_factory=list, title="Families") + bake_attributes: list[str] = SettingsField( + default_factory=list, title="Bake Attributes", + description="List of attributes that will be included in the alembic " + "export.", + ) + bake_attribute_prefixes: list[str] = SettingsField( + default_factory=list, title="Bake Attribute Prefixes", + description="List of attribute prefixes for attributes that will be " + "included in the alembic export.", + ) class ExtractObjModel(BaseSettingsModel): @@ -306,6 +316,12 @@ class ExtractObjModel(BaseSettingsModel): optional: bool = SettingsField(title="Optional") +class ExtractModelModel(BaseSettingsModel): + enabled: bool = SettingsField(title="Enabled") + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") + + class ExtractMayaSceneRawModel(BaseSettingsModel): """Add loaded instances to those published families:""" enabled: bool = SettingsField(title="ExtractMayaSceneRaw") @@ -315,14 +331,13 @@ class ExtractMayaSceneRawModel(BaseSettingsModel): class ExtractCameraAlembicModel(BaseSettingsModel): - """ - List of attributes that will be added to the baked alembic camera. Needs to be written in python list syntax. - """ enabled: bool = SettingsField(title="ExtractCameraAlembic") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") bake_attributes: str = SettingsField( - "[]", title="Base Attributes", widget="textarea" + "[]", title="Bake Attributes", widget="textarea", + description="List of attributes that will be included in the alembic " + "camera export. 
Needs to be written as a JSON list.", ) @validator("bake_attributes") @@ -363,7 +378,9 @@ class ExtractLookModel(BaseSettingsModel): class ExtractGPUCacheModel(BaseSettingsModel): - enabled: bool = True + enabled: bool = SettingsField(title="Enabled") + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") families: list[str] = SettingsField(default_factory=list, title="Families") step: float = SettingsField(1.0, ge=1.0, title="Step") stepSave: int = SettingsField(1, ge=1, title="Step Save") @@ -790,6 +807,10 @@ class PublishersModel(BaseSettingsModel): default_factory=ExtractGPUCacheModel, title="Extract GPU Cache", ) + ExtractModel: ExtractModelModel = SettingsField( + default_factory=ExtractModelModel, + title="Extract Model (Maya Scene)" + ) DEFAULT_SUFFIX_NAMING = { @@ -1185,7 +1206,9 @@ DEFAULT_PUBLISH_SETTINGS = { "pointcache", "model", "vrayproxy.alembic" - ] + ], + "bake_attributes": [], + "bake_attribute_prefixes": [] }, "ExtractObj": { "enabled": False, @@ -1330,6 +1353,8 @@ DEFAULT_PUBLISH_SETTINGS = { }, "ExtractGPUCache": { "enabled": False, + "optional": False, + "active": True, "families": [ "model", "animation", @@ -1342,5 +1367,10 @@ DEFAULT_PUBLISH_SETTINGS = { "optimizeAnimationsForMotionBlur": True, "writeMaterials": True, "useBaseTessellation": True + }, + "ExtractModel": { + "enabled": True, + "optional": True, + "active": True, } } diff --git a/server_addon/maya/server/version.py b/server_addon/maya/server/version.py index 0b44700d27..1a4f79a972 100644 --- a/server_addon/maya/server/version.py +++ b/server_addon/maya/server/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring addon version.""" -__version__ = "0.1.11" +__version__ = "0.1.13" diff --git a/server_addon/nuke/server/settings/loader_plugins.py b/server_addon/nuke/server/settings/loader_plugins.py index a5c3315fd4..531ea8d986 100644 --- a/server_addon/nuke/server/settings/loader_plugins.py +++ b/server_addon/nuke/server/settings/loader_plugins.py @@ -42,7 +42,7 @@ class LoadClipModel(BaseSettingsModel): ) -class LoaderPuginsModel(BaseSettingsModel): +class LoaderPluginsModel(BaseSettingsModel): LoadImage: LoadImageModel = SettingsField( default_factory=LoadImageModel, title="Load Image" diff --git a/server_addon/nuke/server/settings/main.py b/server_addon/nuke/server/settings/main.py index 2b269f1fce..1fd347cc21 100644 --- a/server_addon/nuke/server/settings/main.py +++ b/server_addon/nuke/server/settings/main.py @@ -1,7 +1,6 @@ from ayon_server.settings import ( BaseSettingsModel, SettingsField, - ensure_unique_names ) from .general import ( @@ -29,11 +28,11 @@ from .create_plugins import ( DEFAULT_CREATE_SETTINGS ) from .publish_plugins import ( - PublishPuginsModel, + PublishPluginsModel, DEFAULT_PUBLISH_PLUGIN_SETTINGS ) from .loader_plugins import ( - LoaderPuginsModel, + LoaderPluginsModel, DEFAULT_LOADER_PLUGINS_SETTINGS ) from .workfile_builder import ( @@ -76,13 +75,13 @@ class NukeSettings(BaseSettingsModel): title="Creator Plugins", ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish Plugins", ) - load: LoaderPuginsModel = SettingsField( - default_factory=LoaderPuginsModel, + load: LoaderPluginsModel = SettingsField( + default_factory=LoaderPluginsModel, title="Loader Plugins", ) diff --git a/server_addon/nuke/server/settings/publish_plugins.py b/server_addon/nuke/server/settings/publish_plugins.py 
index 7d9c914fee..d5b05d8715 100644 --- a/server_addon/nuke/server/settings/publish_plugins.py +++ b/server_addon/nuke/server/settings/publish_plugins.py @@ -219,7 +219,7 @@ class IncrementScriptVersionModel(BaseSettingsModel): active: bool = SettingsField(title="Active") -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectInstanceData: CollectInstanceDataModel = SettingsField( title="Collect Instance Version", default_factory=CollectInstanceDataModel, diff --git a/server_addon/resolve/server/settings.py b/server_addon/resolve/server/settings.py index dcdb2f1b27..d9cbb98340 100644 --- a/server_addon/resolve/server/settings.py +++ b/server_addon/resolve/server/settings.py @@ -69,7 +69,7 @@ class CreateShotClipModels(BaseSettingsModel): ) -class CreatorPuginsModel(BaseSettingsModel): +class CreatorPluginsModel(BaseSettingsModel): CreateShotClip: CreateShotClipModels = SettingsField( default_factory=CreateShotClipModels, title="Create Shot Clip" @@ -84,8 +84,8 @@ class ResolveSettings(BaseSettingsModel): default_factory=ResolveImageIOModel, title="Color Management (ImageIO)" ) - create: CreatorPuginsModel = SettingsField( - default_factory=CreatorPuginsModel, + create: CreatorPluginsModel = SettingsField( + default_factory=CreatorPluginsModel, title="Creator plugins", ) diff --git a/server_addon/traypublisher/server/settings/simple_creators.py b/server_addon/traypublisher/server/settings/simple_creators.py index 924eeedd23..6b979bbe52 100644 --- a/server_addon/traypublisher/server/settings/simple_creators.py +++ b/server_addon/traypublisher/server/settings/simple_creators.py @@ -142,6 +142,7 @@ DEFAULT_SIMPLE_CREATORS = [ "extensions": [ ".exr", ".png", + ".dng", ".dpx", ".jpg", ".tiff", @@ -165,6 +166,7 @@ DEFAULT_SIMPLE_CREATORS = [ "extensions": [ ".exr", ".png", + ".dng", ".dpx", ".jpg", ".jpeg", @@ -215,6 +217,7 @@ DEFAULT_SIMPLE_CREATORS = [ ".exr", ".jpg", ".jpeg", + ".dng", ".dpx", ".bmp", ".tif", diff --git a/server_addon/traypublisher/server/version.py b/server_addon/traypublisher/server/version.py index e57ad00718..de699158fd 100644 --- a/server_addon/traypublisher/server/version.py +++ b/server_addon/traypublisher/server/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring addon version.""" -__version__ = "0.1.3" +__version__ = "0.1.4" diff --git a/server_addon/tvpaint/server/settings/main.py b/server_addon/tvpaint/server/settings/main.py index c6b6c9ab12..f20e9ecc9c 100644 --- a/server_addon/tvpaint/server/settings/main.py +++ b/server_addon/tvpaint/server/settings/main.py @@ -1,7 +1,6 @@ from ayon_server.settings import ( BaseSettingsModel, SettingsField, - ensure_unique_names, ) from .imageio import TVPaintImageIOModel