diff --git a/client/ayon_core/addon/base.py b/client/ayon_core/addon/base.py index 6ef838652e..21b1193b07 100644 --- a/client/ayon_core/addon/base.py +++ b/client/ayon_core/addon/base.py @@ -15,6 +15,7 @@ from abc import ABCMeta, abstractmethod import six import appdirs import ayon_api +from semver import VersionInfo from ayon_core import AYON_CORE_ROOT from ayon_core.lib import Logger, is_dev_mode_enabled @@ -46,6 +47,11 @@ IGNORED_HOSTS_IN_AYON = { } IGNORED_MODULES_IN_AYON = set() +# When addon was moved from ayon-core codebase +# - this is used to log the missing addon +MOVED_ADDON_MILESTONE_VERSIONS = { + "applications": VersionInfo(0, 2, 0), +} # Inherit from `object` for Python 2 hosts class _ModuleClass(object): @@ -192,6 +198,45 @@ def _get_ayon_addons_information(bundle_info): return output +def _handle_moved_addons(addon_name, milestone_version, log): + """Log message that addon version is not compatible with current core. + + The function can return a path to the addon client code, but only + when ayon-core is used from code (for development), and even then it + logs a warning. + + Args: + addon_name (str): Addon name. + milestone_version (VersionInfo): Milestone addon version. + log (logging.Logger): Logger object. + + Returns: + Union[str, None]: Addon dir or None. + """ + # Handle addons which were moved out of ayon-core + # - Try to fix it by loading it directly from server addons dir in + # ayon-core repository. But that will work only if ayon-core is + # used from code. + addon_dir = os.path.join( + os.path.dirname(os.path.dirname(AYON_CORE_ROOT)), + "server_addon", + addon_name, + "client", + ) + if not os.path.exists(addon_dir): + log.error(( + "Addon '{}' is not available." + " Please update the addon to '{}' or higher." + ).format(addon_name, milestone_version)) + return None + + log.warning(( + "Please update '{}' addon to '{}' or higher." + " Using client code from ayon-core repository." + ).format(addon_name, milestone_version)) + return addon_dir + + def _load_ayon_addons(openpype_modules, modules_key, log): """Load AYON addons based on information from server.
@@ -249,6 +294,7 @@ def _load_ayon_addons(openpype_modules, modules_key, log): use_dev_path = dev_addon_info.get("enabled", False) addon_dir = None + milestone_version = MOVED_ADDON_MILESTONE_VERSIONS.get(addon_name) if use_dev_path: addon_dir = dev_addon_info["path"] if not addon_dir or not os.path.exists(addon_dir): @@ -257,6 +303,16 @@ def _load_ayon_addons(openpype_modules, modules_key, log): ).format(addon_name, addon_version, addon_dir)) continue + elif ( + milestone_version is not None + and VersionInfo.parse(addon_version) < milestone_version + ): + addon_dir = _handle_moved_addons( + addon_name, milestone_version, log + ) + if not addon_dir: + continue + elif addons_dir_exists: folder_name = "{}_{}".format(addon_name, addon_version) addon_dir = os.path.join(addons_dir, folder_name) @@ -336,66 +392,9 @@ def _load_ayon_addons(openpype_modules, modules_key, log): return addons_to_skip_in_core -def _load_ayon_core_addons_dir( - ignore_addon_names, openpype_modules, modules_key, log -): - addons_dir = os.path.join(AYON_CORE_ROOT, "addons") - if not os.path.exists(addons_dir): - return - - imported_modules = [] - - # Make sure that addons which already have client code are not loaded - # from core again, with older code - filtered_paths = [] - for name in os.listdir(addons_dir): - if name in ignore_addon_names: - continue - path = os.path.join(addons_dir, name) - if os.path.isdir(path): - filtered_paths.append(path) - - for path in filtered_paths: - while path in sys.path: - sys.path.remove(path) - sys.path.insert(0, path) - - for name in os.listdir(path): - fullpath = os.path.join(path, name) - if os.path.isfile(fullpath): - basename, ext = os.path.splitext(name) - if ext != ".py": - continue - else: - basename = name - try: - module = __import__(basename, fromlist=("",)) - for attr_name in dir(module): - attr = getattr(module, attr_name) - if ( - inspect.isclass(attr) - and issubclass(attr, AYONAddon) - ): - new_import_str = "{}.{}".format(modules_key, basename) - sys.modules[new_import_str] = module - setattr(openpype_modules, basename, module) - imported_modules.append(module) - break - - except Exception: - log.error( - "Failed to import addon '{}'.".format(fullpath), - exc_info=True - ) - return imported_modules - - def _load_addons_in_core( ignore_addon_names, openpype_modules, modules_key, log ): - _load_ayon_core_addons_dir( - ignore_addon_names, openpype_modules, modules_key, log - ) # Add current directory at first place # - has small differences in import logic hosts_dir = os.path.join(AYON_CORE_ROOT, "hosts") diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py index 4134e9d593..c28042b6ae 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py @@ -41,7 +41,6 @@ class CollectAERender(publish.AbstractCollectRender): def get_instances(self, context): instances = [] - instances_to_remove = [] app_version = CollectAERender.get_stub().get_app_version() app_version = app_version[0:4] @@ -117,7 +116,10 @@ class CollectAERender(publish.AbstractCollectRender): fps=fps, app_version=app_version, publish_attributes=inst.data.get("publish_attributes", {}), - file_names=[item.file_name for item in render_q] + file_names=[item.file_name for item in render_q], + + # The source instance this render instance replaces + source_instance=inst ) comp = compositions_by_id.get(comp_id) @@ -145,10 
+147,7 @@ class CollectAERender(publish.AbstractCollectRender): instance.families.remove("review") instances.append(instance) - instances_to_remove.append(inst) - for instance in instances_to_remove: - context.remove(instance) return instances def get_expected_files(self, render_instance): diff --git a/client/ayon_core/hosts/blender/addon.py b/client/ayon_core/hosts/blender/addon.py index b7484de243..6a4b325365 100644 --- a/client/ayon_core/hosts/blender/addon.py +++ b/client/ayon_core/hosts/blender/addon.py @@ -55,8 +55,7 @@ class BlenderAddon(AYONAddon, IHostAddon): ) # Define Qt binding if not defined - if not env.get("QT_PREFERRED_BINDING"): - env["QT_PREFERRED_BINDING"] = "PySide2" + env.pop("QT_PREFERRED_BINDING", None) def get_launch_hook_paths(self, app): if app.host_name != self.host_name: diff --git a/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py b/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py index 8f46eea0de..87a4f5cfad 100644 --- a/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py +++ b/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py @@ -31,7 +31,7 @@ class InstallPySideToBlender(PreLaunchHook): def inner_execute(self): # Get blender's python directory - version_regex = re.compile(r"^[2-4]\.[0-9]+$") + version_regex = re.compile(r"^([2-4])\.[0-9]+$") platform = system().lower() executable = self.launch_context.executable.executable_path @@ -42,7 +42,8 @@ class InstallPySideToBlender(PreLaunchHook): if os.path.basename(executable).lower() != expected_executable: self.log.info(( f"Executable does not lead to {expected_executable} file." - "Can't determine blender's python to check/install PySide2." + "Can't determine blender's python to check/install" + " Qt binding." )) return @@ -73,6 +74,15 @@ class InstallPySideToBlender(PreLaunchHook): return version_subfolder = version_subfolders[0] + before_blender_4 = False + if int(version_regex.match(version_subfolder).group(1)) < 4: + before_blender_4 = True + # Blender 4 has Python 3.11 which does not support 'PySide2' + # QUESTION could we always install PySide6? + qt_binding = "PySide2" if before_blender_4 else "PySide6" + # Use PySide6 6.6.3 because 6.7.0 had a bug + # - 'QTextEdit' can't be added to 'QBoxLayout' + qt_binding_version = None if before_blender_4 else "6.6.3" python_dir = os.path.join(versions_dir, version_subfolder, "python") python_lib = os.path.join(python_dir, "lib") @@ -116,22 +126,41 @@ class InstallPySideToBlender(PreLaunchHook): return # Check if PySide2 is installed and skip if yes - if self.is_pyside_installed(python_executable): + if self.is_pyside_installed(python_executable, qt_binding): self.log.debug("Blender has already installed PySide2.") return # Install PySide2 in blender's python if platform == "windows": - result = self.install_pyside_windows(python_executable) + result = self.install_pyside_windows( + python_executable, + qt_binding, + qt_binding_version, + before_blender_4, + ) else: - result = self.install_pyside(python_executable) + result = self.install_pyside( + python_executable, + qt_binding, + qt_binding_version, + ) if result: - self.log.info("Successfully installed PySide2 module to blender.") + self.log.info( + f"Successfully installed {qt_binding} module to blender." + ) else: - self.log.warning("Failed to install PySide2 module to blender.") + self.log.warning( + f"Failed to install {qt_binding} module to blender." 
+ ) - def install_pyside_windows(self, python_executable): + def install_pyside_windows( + self, + python_executable, + qt_binding, + qt_binding_version, + before_blender_4, + ): """Install PySide2 python module to blender's python. Installation requires administration rights that's why it is required @@ -139,7 +168,6 @@ class InstallPySideToBlender(PreLaunchHook): administration rights. """ try: - import win32api import win32con import win32process import win32event @@ -150,12 +178,37 @@ class InstallPySideToBlender(PreLaunchHook): self.log.warning("Couldn't import \"pywin32\" modules") return + if qt_binding_version: + qt_binding = f"{qt_binding}=={qt_binding_version}" + try: # Parameters # - use "-m pip" as module pip to install PySide2 and argument # "--ignore-installed" is to force install module to blender's # site-packages and make sure it is binary compatible - parameters = "-m pip install --ignore-installed PySide2" + fake_exe = "fake.exe" + site_packages_prefix = os.path.dirname( + os.path.dirname(python_executable) + ) + args = [ + fake_exe, + "-m", + "pip", + "install", + "--ignore-installed", + qt_binding, + ] + if not before_blender_4: + # Define prefix for site package + # Python in blender 4.x is installing packages in AppData and + # not in blender's directory. + args.extend(["--prefix", site_packages_prefix]) + + parameters = ( + subprocess.list2cmdline(args) + .lstrip(fake_exe) + .lstrip(" ") + ) # Execute command and ask for administrator's rights process_info = ShellExecuteEx( @@ -173,20 +226,29 @@ class InstallPySideToBlender(PreLaunchHook): except pywintypes.error: pass - def install_pyside(self, python_executable): - """Install PySide2 python module to blender's python.""" + def install_pyside( + self, + python_executable, + qt_binding, + qt_binding_version, + ): + """Install Qt binding python module to blender's python.""" + if qt_binding_version: + qt_binding = f"{qt_binding}=={qt_binding_version}" try: # Parameters - # - use "-m pip" as module pip to install PySide2 and argument + # - use "-m pip" as module pip to install qt binding and argument # "--ignore-installed" is to force install module to blender's # site-packages and make sure it is binary compatible + # TODO find out if blender 4.x on linux/darwin does install + # qt binding to correct place. args = [ python_executable, "-m", "pip", "install", "--ignore-installed", - "PySide2", + qt_binding, ] process = subprocess.Popen( args, stdout=subprocess.PIPE, universal_newlines=True @@ -203,13 +265,15 @@ class InstallPySideToBlender(PreLaunchHook): except subprocess.SubprocessError: pass - def is_pyside_installed(self, python_executable): + def is_pyside_installed(self, python_executable, qt_binding): """Check if PySide2 module is in blender's pip list. Check that PySide2 is installed directly in blender's site-packages. It is possible that it is installed in user's site-packages but that may be incompatible with blender's python. 
""" + + qt_binding_low = qt_binding.lower() # Get pip list from blender's python executable args = [python_executable, "-m", "pip", "list"] process = subprocess.Popen(args, stdout=subprocess.PIPE) @@ -226,6 +290,6 @@ class InstallPySideToBlender(PreLaunchHook): if not line: continue package_name = line[0:package_len].strip() - if package_name.lower() == "pyside2": + if package_name.lower() == qt_binding_low: return True return False diff --git a/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py b/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py index bea997108b..d20eaad9fc 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py @@ -167,7 +167,7 @@ class JsonLayoutLoader(plugin.AssetLoader): asset_group.empty_display_type = 'SINGLE_ARROW' avalon_container.objects.link(asset_group) - self._process(libpath, asset, asset_group, None) + self._process(libpath, asset_name, asset_group, None) bpy.context.scene.collection.objects.link(asset_group) diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_abc.py b/client/ayon_core/hosts/blender/plugins/publish/extract_abc.py index 094f88fd8c..6590be515c 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_abc.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_abc.py @@ -2,6 +2,7 @@ import os import bpy +from ayon_core.lib import BoolDef from ayon_core.pipeline import publish from ayon_core.hosts.blender.api import plugin @@ -17,6 +18,8 @@ class ExtractABC(publish.Extractor, publish.OptionalPyblishPluginMixin): if not self.is_active(instance.data): return + attr_values = self.get_attr_values_from_data(instance.data) + # Define extract output file path stagingdir = self.staging_dir(instance) folder_name = instance.data["folderEntity"]["name"] @@ -46,7 +49,8 @@ class ExtractABC(publish.Extractor, publish.OptionalPyblishPluginMixin): bpy.ops.wm.alembic_export( filepath=filepath, selected=True, - flatten=False + flatten=False, + subdiv_schema=attr_values.get("subdiv_schema", False) ) plugin.deselect_all() @@ -65,6 +69,21 @@ class ExtractABC(publish.Extractor, publish.OptionalPyblishPluginMixin): self.log.debug("Extracted instance '%s' to: %s", instance.name, representation) + @classmethod + def get_attribute_defs(cls): + return [ + BoolDef( + "subdiv_schema", + label="Alembic Mesh Subdiv Schema", + tooltip="Export Meshes using Alembic's subdivision schema.\n" + "Enabling this includes creases with the export but " + "excludes the mesh's normals.\n" + "Enabling this usually result in smaller file size " + "due to lack of normals.", + default=False + ) + ] + class ExtractModelABC(ExtractABC): """Extract model as ABC.""" diff --git a/client/ayon_core/hosts/fusion/api/lib.py b/client/ayon_core/hosts/fusion/api/lib.py index 03a1eeeb65..08722463e1 100644 --- a/client/ayon_core/hosts/fusion/api/lib.py +++ b/client/ayon_core/hosts/fusion/api/lib.py @@ -3,8 +3,8 @@ import sys import re import contextlib -from ayon_core.lib import Logger - +from ayon_core.lib import Logger, BoolDef, UILabelDef +from ayon_core.style import load_stylesheet from ayon_core.pipeline import registered_host from ayon_core.pipeline.create import CreateContext from ayon_core.pipeline.context_tools import get_current_folder_entity @@ -181,7 +181,6 @@ def validate_comp_prefs(comp=None, force_repair=False): from . 
import menu from ayon_core.tools.utils import SimplePopup - from ayon_core.style import load_stylesheet dialog = SimplePopup(parent=menu.menu) dialog.setWindowTitle("Fusion comp has invalid configuration") @@ -340,9 +339,7 @@ def prompt_reset_context(): from ayon_core.tools.attribute_defs.dialog import ( AttributeDefinitionsDialog ) - from ayon_core.style import load_stylesheet - from ayon_core.lib import BoolDef, UILabelDef - from qtpy import QtWidgets, QtCore + from qtpy import QtCore definitions = [ UILabelDef( diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_launch_menu_hook.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_launch_menu_hook.py index e70d4b844e..113a1ffe59 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_fusion_launch_menu_hook.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_launch_menu_hook.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib import PreLaunchHook +from ayon_applications import PreLaunchHook from ayon_core.hosts.fusion import FUSION_HOST_DIR diff --git a/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py b/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py index ab12078c43..4678d5bac7 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py @@ -85,7 +85,6 @@ class InstallPySideToFusion(PreLaunchHook): administration rights. """ try: - import win32api import win32con import win32process import win32event diff --git a/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py b/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py index 36102d02cb..7a2844d5db 100644 --- a/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py @@ -37,14 +37,13 @@ class CollectFusionRender( aspect_x = comp_frame_format_prefs["AspectX"] aspect_y = comp_frame_format_prefs["AspectY"] - instances = [] - instances_to_remove = [] current_file = context.data["currentFile"] version = context.data["version"] project_entity = context.data["projectEntity"] + instances = [] for inst in context: if not inst.data.get("active", True): continue @@ -91,7 +90,10 @@ class CollectFusionRender( frameStep=1, fps=comp_frame_format_prefs.get("Rate"), app_version=comp.GetApp().Version, - publish_attributes=inst.data.get("publish_attributes", {}) + publish_attributes=inst.data.get("publish_attributes", {}), + + # The source instance this render instance replaces + source_instance=inst ) render_target = inst.data["creator_attributes"]["render_target"] @@ -114,13 +116,7 @@ class CollectFusionRender( # to skip ExtractReview locally instance.families.remove("review") - # add new instance to the list and remove the original - # instance since it is not needed anymore instances.append(instance) - instances_to_remove.append(inst) - - for instance in instances_to_remove: - context.remove(instance) return instances diff --git a/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py b/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py index 67e1f18cbf..b7a508f0b5 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py @@ -92,10 +92,6 @@ class PrecollectInstances(pyblish.api.ContextPlugin): folder_path, folder_name = self._get_folder_data(tag_data) - product_name = tag_data.get("productName") - if product_name is None: - product_name = tag_data["subset"] - families = [str(f) 
for f in tag_data["families"]] # TODO: remove backward compatibility @@ -293,7 +289,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): label += " {}".format(product_name) data.update({ - "name": "{}_{}".format(folder_path, subset), + "name": "{}_{}".format(folder_path, product_name), "label": label, "productName": product_name, "productType": product_type, diff --git a/client/ayon_core/hosts/houdini/api/lib.py b/client/ayon_core/hosts/houdini/api/lib.py index da1b21ad95..7ca8f7f8f0 100644 --- a/client/ayon_core/hosts/houdini/api/lib.py +++ b/client/ayon_core/hosts/houdini/api/lib.py @@ -811,6 +811,43 @@ def get_current_context_template_data_with_folder_attrs(): return template_data +def set_review_color_space(opengl_node, review_color_space="", log=None): + """Set ociocolorspace parameter for the given OpenGL node. + + Set the `ociocolorspace` parameter of the given OpenGL node + to the given review_color_space value. + If review_color_space is empty, a default colorspace corresponding to + the display & view of the current Houdini session will be used. + + Args: + opengl_node (hou.Node): ROP node to set its ociocolorspace parm. + review_color_space (str): Colorspace value for ociocolorspace parm. + log (logging.Logger): Logger to log to. + """ + + if log is None: + log = logging.getLogger(__name__) + + # Set Color Correction parameter to OpenColorIO + colorcorrect_parm = opengl_node.parm("colorcorrect") + if colorcorrect_parm.eval() != 2: + colorcorrect_parm.set(2) + log.debug( + "'Color Correction' parm on '{}' has been set to" + " 'OpenColorIO'".format(opengl_node.path()) + ) + + opengl_node.setParms( + {"ociocolorspace": review_color_space} + ) + + log.debug( + "'OCIO Colorspace' parm on '{}' has been set to " + "the review color space '{}'" + .format(opengl_node.path(), review_color_space) + ) + + def get_context_var_changes(): """get context var changes.""" diff --git a/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py b/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py new file mode 100644 index 0000000000..2e97c06bff --- /dev/null +++ b/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py @@ -0,0 +1,58 @@ +from ayon_applications import PreLaunchHook, LaunchTypes + + +class SetDefaultDisplayView(PreLaunchHook): + """Set default view and default display for houdini via OpenColorIO. + + Houdini's defaultDisplay and defaultView are set by + setting 'OCIO_ACTIVE_DISPLAYS' and 'OCIO_ACTIVE_VIEWS' + environment variables respectively. + + More info: https://www.sidefx.com/docs/houdini/io/ocio.html#set-up + """ + + app_groups = {"houdini"} + launch_types = {LaunchTypes.local} + + def execute(self): + + OCIO = self.launch_context.env.get("OCIO") + + # This is a cheap way to skip this hook if either global color + # management or houdini color management was disabled because the + # OCIO var would be set by the global OCIOEnvHook + if not OCIO: + return + + houdini_color_settings = \ + self.data["project_settings"]["houdini"]["imageio"]["workfile"] + + if not houdini_color_settings["enabled"]: + self.log.info( + "Houdini workfile color management is disabled." + ) + return + + # 'OCIO_ACTIVE_DISPLAYS', 'OCIO_ACTIVE_VIEWS' are checked + # as Admins can add them in Ayon env vars or Ayon tools. + + default_display = houdini_color_settings["default_display"] + if default_display: + # get 'OCIO_ACTIVE_DISPLAYS' value if exists.
+ self._set_context_env("OCIO_ACTIVE_DISPLAYS", default_display) + + default_view = houdini_color_settings["default_view"] + if default_view: + # get 'OCIO_ACTIVE_VIEWS' value if exists. + self._set_context_env("OCIO_ACTIVE_VIEWS", default_view) + + def _set_context_env(self, env_var, default_value): + env_value = self.launch_context.env.get(env_var, "") + new_value = ":".join( + key for key in [default_value, env_value] if key + ) + self.log.info( + "Setting {} environment to: {}" + .format(env_var, new_value) + ) + self.launch_context.env[env_var] = new_value diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py b/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py index b61b4cbd46..0ab5e2794e 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator plugin for creating alembic camera products.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance, CreatorError +from ayon_core.pipeline import CreatorError import hou @@ -23,7 +23,7 @@ class CreateAlembicCamera(plugin.HoudiniCreator): instance = super(CreateAlembicCamera, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) parms = { diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py index 6d992f136a..be5604c01c 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py @@ -29,7 +29,7 @@ class CreateArnoldAss(plugin.HoudiniCreator): instance = super(CreateArnoldAss, self).create( product_name, instance_data, - pre_create_data) # type: plugin.CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py index b7c5910a4f..f65b54a452 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py @@ -31,7 +31,7 @@ class CreateArnoldRop(plugin.HoudiniCreator): instance = super(CreateArnoldRop, self).create( product_name, instance_data, - pre_create_data) # type: plugin.CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py b/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py index 92c89c71cb..3749598b1d 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator plugin for creating pointcache bgeo files.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance, CreatorError +from ayon_core.pipeline import CreatorError import hou from ayon_core.lib import EnumDef, BoolDef @@ -25,7 +25,7 @@ class CreateBGEO(plugin.HoudiniCreator): instance = super(CreateBGEO, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git 
a/client/ayon_core/hosts/houdini/plugins/create/create_composite.py b/client/ayon_core/hosts/houdini/plugins/create/create_composite.py index a1104e5093..a25faf0e8e 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_composite.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_composite.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator plugin for creating composite sequences.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance, CreatorError +from ayon_core.pipeline import CreatorError import hou @@ -25,7 +25,7 @@ class CreateCompositeSequence(plugin.HoudiniCreator): instance = super(CreateCompositeSequence, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) filepath = "{}{}".format( diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_hda.py b/client/ayon_core/hosts/houdini/plugins/create/create_hda.py index b307293dc8..d399aa5e15 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_hda.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_hda.py @@ -78,7 +78,7 @@ class CreateHDA(plugin.HoudiniCreator): instance = super(CreateHDA, self).create( product_name, instance_data, - pre_create_data) # type: plugin.CreatedInstance + pre_create_data) return instance diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py index 9eb9d80cd3..e91ddbc0ac 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin to create Karma ROP.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance from ayon_core.lib import BoolDef, EnumDef, NumberDef @@ -25,7 +24,7 @@ class CreateKarmaROP(plugin.HoudiniCreator): instance = super(CreateKarmaROP, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py index bb10f3893c..e0cf035c35 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin for creating pointcache alembics.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance from ayon_core.lib import BoolDef @@ -22,7 +21,7 @@ class CreateMantraIFD(plugin.HoudiniCreator): instance = super(CreateMantraIFD, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py index f15f49f463..64ecf428e9 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin to create Mantra ROP.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance from ayon_core.lib 
import EnumDef, BoolDef @@ -28,7 +27,7 @@ class CreateMantraROP(plugin.HoudiniCreator): instance = super(CreateMantraROP, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_review.py b/client/ayon_core/hosts/houdini/plugins/create/create_review.py index 18f7ce498d..94dcf23181 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_review.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_review.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin for creating openGL reviews.""" -from ayon_core.hosts.houdini.api import plugin +from ayon_core.hosts.houdini.api import lib, plugin from ayon_core.lib import EnumDef, BoolDef, NumberDef import os @@ -14,6 +14,13 @@ class CreateReview(plugin.HoudiniCreator): label = "Review" product_type = "review" icon = "video-camera" + review_color_space = "" + + def apply_settings(self, project_settings): + super(CreateReview, self).apply_settings(project_settings) + color_settings = project_settings["houdini"]["imageio"]["workfile"] + if color_settings["enabled"]: + self.review_color_space = color_settings.get("review_color_space") def create(self, product_name, instance_data, pre_create_data): @@ -85,10 +92,20 @@ class CreateReview(plugin.HoudiniCreator): instance_node.setParms(parms) - # Set OCIO Colorspace to the default output colorspace + # Set OCIO Colorspace to the default colorspace # if there's OCIO if os.getenv("OCIO"): - self.set_colorcorrect_to_default_view_space(instance_node) + # Fall to the default value if cls.review_color_space is empty. + if not self.review_color_space: + # cls.review_color_space is an empty string + # when the imageio/workfile setting is disabled or + # when the Review colorspace setting is empty. + from ayon_core.hosts.houdini.api.colorspace import get_default_display_view_colorspace # noqa + self.review_color_space = get_default_display_view_colorspace() + + lib.set_review_color_space(instance_node, + self.review_color_space, + self.log) to_lock = ["id", "productType"] @@ -131,23 +148,3 @@ class CreateReview(plugin.HoudiniCreator): minimum=0.0001, decimals=3) ] - - def set_colorcorrect_to_default_view_space(self, - instance_node): - """Set ociocolorspace to the default output space.""" - from ayon_core.hosts.houdini.api.colorspace import get_default_display_view_colorspace # noqa - - # set Color Correction parameter to OpenColorIO - instance_node.setParms({"colorcorrect": 2}) - - # Get default view space for ociocolorspace parm. 
- default_view_space = get_default_display_view_colorspace() - instance_node.setParms( - {"ociocolorspace": default_view_space} - ) - - self.log.debug( - "'OCIO Colorspace' parm on '{}' has been set to " - "the default view color space '{}'" - .format(instance_node, default_view_space) - ) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_usd.py b/client/ayon_core/hosts/houdini/plugins/create/create_usd.py index ee05639368..700f7eefd6 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_usd.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_usd.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin for creating USDs.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance import hou @@ -22,7 +21,7 @@ class CreateUSD(plugin.HoudiniCreator): instance = super(CreateUSD, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py b/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py index 0a5c8896a8..36197e349e 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin for creating USD renders.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance class CreateUSDRender(plugin.HoudiniCreator): @@ -23,7 +22,7 @@ class CreateUSDRender(plugin.HoudiniCreator): instance = super(CreateUSDRender, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py b/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py index 9ac7ebdff7..c34cd2b4b5 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin for creating VDB Caches.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance from ayon_core.lib import BoolDef import hou @@ -26,7 +25,7 @@ class CreateVDBCache(plugin.HoudiniCreator): instance = super(CreateVDBCache, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) file_path = "{}{}".format( diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py index 6b2396bffb..5ed9e848a7 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py @@ -3,7 +3,7 @@ import hou from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance, CreatorError +from ayon_core.pipeline import CreatorError from ayon_core.lib import EnumDef, BoolDef @@ -31,7 +31,7 @@ class CreateVrayROP(plugin.HoudiniCreator): instance = super(CreateVrayROP, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py 
b/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py index 1bb9043cd0..5f04781501 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py @@ -45,33 +45,11 @@ class AbcLoader(load.LoaderPlugin): alembic = container.createNode("alembic", node_name=node_name) alembic.setParms({"fileName": file_path}) - # Add unpack node - unpack_name = "unpack_{}".format(name) - unpack = container.createNode("unpack", node_name=unpack_name) - unpack.setInput(0, alembic) - unpack.setParms({"transfer_attributes": "path"}) + # Position nodes nicely + container.moveToGoodPosition() + container.layoutChildren() - # Add normal to points - # Order of menu ['point', 'vertex', 'prim', 'detail'] - normal_name = "normal_{}".format(name) - normal_node = container.createNode("normal", node_name=normal_name) - normal_node.setParms({"type": 0}) - - normal_node.setInput(0, unpack) - - null = container.createNode("null", node_name="OUT") - null.setInput(0, normal_node) - - # Ensure display flag is on the Alembic input node and not on the OUT - # node to optimize "debug" displaying in the viewport. - alembic.setDisplayFlag(True) - - # Set new position for unpack node else it gets cluttered - nodes = [container, alembic, unpack, normal_node, null] - for nr, node in enumerate(nodes): - node.setPosition([0, (0 - nr)]) - - self[:] = nodes + nodes = [container, alembic] return pipeline.containerise( node_name, diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_inputs.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_inputs.py index 7d7fabb315..6cf6bbf430 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_inputs.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_inputs.py @@ -1,9 +1,21 @@ +from collections import deque + import pyblish.api from ayon_core.pipeline import registered_host -def collect_input_containers(nodes): +def get_container_members(container): + node = container["node"] + # Usually the loaded containers don't have any complex references + # and the contained children should be all we need. So we disregard + # checking for .references() on the nodes. + members = set(node.allSubChildren()) + members.add(node) # include the node itself + return members + + +def collect_input_containers(containers, nodes): """Collect containers that contain any of the node in `nodes`. This will return any loaded Avalon container that contains at least one of @@ -11,30 +23,13 @@ def collect_input_containers(nodes): there are member nodes of that container. Returns: - list: Input avalon containers + list: Loaded containers that contain the `nodes` """ - - # Lookup by node ids - lookup = frozenset(nodes) - - containers = [] - host = registered_host() - for container in host.ls(): - - node = container["node"] - - # Usually the loaded containers don't have any complex references - # and the contained children should be all we need. So we disregard - # checking for .references() on the nodes. - members = set(node.allSubChildren()) - members.add(node) # include the node itself - - # If there's an intersection - if not lookup.isdisjoint(members): - containers.append(container) - - return containers + # Assume the containers have collected their cached '_members' data + # in the collector. 
+ return [container for container in containers + if any(node in container["_members"] for node in nodes)] def iter_upstream(node): @@ -54,7 +49,7 @@ def iter_upstream(node): ) # Initialize process queue with the node's ancestors itself - queue = list(upstream) + queue = deque(upstream) collected = set(upstream) # Traverse upstream references for all nodes and yield them as we @@ -72,6 +67,10 @@ def iter_upstream(node): # Include the references' ancestors that have not been collected yet. for reference in references: + if reference in collected: + # Might have been collected in previous iteration + continue + ancestors = reference.inputAncestors( include_ref_inputs=True, follow_subnets=True ) @@ -108,13 +107,32 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): ) return - # Collect all upstream parents - nodes = list(iter_upstream(output)) - nodes.append(output) + # For large scenes the querying of "host.ls()" can be relatively slow + # e.g. up to a second. Many instances calling it easily slows this + # down. As such, we cache it so we trigger it only once. + # todo: Instead of hidden cache make "CollectContainers" plug-in + cache_key = "__cache_containers" + scene_containers = instance.context.data.get(cache_key, None) + if scene_containers is None: + # Query the scenes' containers if there's no cache yet + host = registered_host() + scene_containers = list(host.ls()) + for container in scene_containers: + # Embed the members into the container dictionary + container_members = set(get_container_members(container)) + container["_members"] = container_members + instance.context.data[cache_key] = scene_containers - # Collect containers for the given set of nodes - containers = collect_input_containers(nodes) + inputs = [] + if scene_containers: + # Collect all upstream parents + nodes = list(iter_upstream(output)) + nodes.append(output) + + # Collect containers for the given set of nodes + containers = collect_input_containers(scene_containers, nodes) + + inputs = [c["representation"] for c in containers] - inputs = [c["representation"] for c in containers] instance.data["inputRepresentations"] = inputs self.log.debug("Collected inputs: %s" % inputs) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/extract_composite.py b/client/ayon_core/hosts/houdini/plugins/publish/extract_composite.py index c6dfb4332d..0fab69ef4a 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/extract_composite.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/extract_composite.py @@ -7,7 +7,8 @@ from ayon_core.hosts.houdini.api.lib import render_rop, splitext import hou -class ExtractComposite(publish.Extractor): +class ExtractComposite(publish.Extractor, + publish.ColormanagedPyblishPluginMixin): order = pyblish.api.ExtractorOrder label = "Extract Composite (Image Sequence)" @@ -45,8 +46,14 @@ class ExtractComposite(publish.Extractor): "frameEnd": instance.data["frameEndHandle"], } - from pprint import pformat - - self.log.info(pformat(representation)) + if ext.lower() == "exr": + # Inject colorspace with 'scene_linear' as that's the + # default Houdini working colorspace and all extracted + # OpenEXR images should be in that colorspace. 
+ # https://www.sidefx.com/docs/houdini/render/linear.html#image-formats + self.set_representation_colorspace( + representation, instance.context, + colorspace="scene_linear" + ) instance.data["representations"].append(representation) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/extract_opengl.py b/client/ayon_core/hosts/houdini/plugins/publish/extract_opengl.py index fabdfd9a9d..57bb8b881a 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/extract_opengl.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/extract_opengl.py @@ -8,7 +8,8 @@ from ayon_core.hosts.houdini.api.lib import render_rop import hou -class ExtractOpenGL(publish.Extractor): +class ExtractOpenGL(publish.Extractor, + publish.ColormanagedPyblishPluginMixin): order = pyblish.api.ExtractorOrder - 0.01 label = "Extract OpenGL" @@ -46,6 +47,14 @@ class ExtractOpenGL(publish.Extractor): "camera_name": instance.data.get("review_camera") } + if ropnode.evalParm("colorcorrect") == 2: # OpenColorIO enabled + colorspace = ropnode.evalParm("ociocolorspace") + # inject colorspace data + self.set_representation_colorspace( + representation, instance.context, + colorspace=colorspace + ) + if "representations" not in instance.data: instance.data["representations"] = [] instance.data["representations"].append(representation) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/increment_current_file.py b/client/ayon_core/hosts/houdini/plugins/publish/increment_current_file.py index 73145b211a..fe8fa25f10 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/increment_current_file.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/increment_current_file.py @@ -3,7 +3,6 @@ import pyblish.api from ayon_core.lib import version_up from ayon_core.pipeline import registered_host from ayon_core.pipeline.publish import get_errored_plugins_from_context -from ayon_core.hosts.houdini.api import HoudiniHost from ayon_core.pipeline.publish import KnownPublishError @@ -39,7 +38,7 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin): ) # Filename must not have changed since collecting - host = registered_host() # type: HoudiniHost + host = registered_host() current_file = host.current_file() if context.data["currentFile"] != current_file: raise KnownPublishError( diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py index fdf03d5cba..91bd36018a 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- -import sys +import hou import pyblish.api -import six from ayon_core.pipeline import PublishValidationError @@ -26,28 +25,21 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( - ("Output node(s) `{}` are incorrect. " - "See plug-in log for details.").format(invalid), - title=self.label + "Output node '{}' is incorrect. " + "See plug-in log for details.".format(invalid), + title=self.label, + description=( + "### Invalid COP output node\n\n" + "The output node path for the instance must be set to a " + "valid COP node path.\n\nSee the log for more details." 
+ ) ) @classmethod def get_invalid(cls, instance): + output_node = instance.data.get("output_node") - import hou - - try: - output_node = instance.data["output_node"] - except KeyError: - six.reraise( - PublishValidationError, - PublishValidationError( - "Can't determine COP output node.", - title=cls.__name__), - sys.exc_info()[2] - ) - - if output_node is None: + if not output_node: node = hou.node(instance.data.get("instance_node")) cls.log.error( "COP Output node in '%s' does not exist. " @@ -61,8 +53,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): cls.log.error( "Output node %s is not a COP node. " "COP Path must point to a COP node, " - "instead found category type: %s" - % (output_node.path(), output_node.type().category().name()) + "instead found category type: %s", + output_node.path(), output_node.type().category().name() ) return [output_node.path()] @@ -70,9 +62,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): # is Cop2 to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category if output_node.type().category().name() != "Cop2": - raise PublishValidationError( - ( - "Output node {} is not of category Cop2." - " This is a bug..." - ).format(output_node.path()), - title=cls.label) + cls.log.error( + "Output node %s is not of category Cop2.", output_node.path() + ) + return [output_node.path()] diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py index 031138e21d..d3afa83b67 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py @@ -4,15 +4,19 @@ from ayon_core.pipeline import ( PublishValidationError, OptionalPyblishPluginMixin ) -from ayon_core.pipeline.publish import RepairAction +from ayon_core.pipeline.publish import ( + RepairAction, + get_plugin_settings, + apply_plugin_settings_automatically +) from ayon_core.hosts.houdini.api.action import SelectROPAction import os import hou -class SetDefaultViewSpaceAction(RepairAction): - label = "Set default view colorspace" +class ResetViewSpaceAction(RepairAction): + label = "Reset OCIO colorspace parm" icon = "mdi.monitor" @@ -27,9 +31,25 @@ class ValidateReviewColorspace(pyblish.api.InstancePlugin, families = ["review"] hosts = ["houdini"] label = "Validate Review Colorspace" - actions = [SetDefaultViewSpaceAction, SelectROPAction] + actions = [ResetViewSpaceAction, SelectROPAction] optional = True + review_color_space = "" + + @classmethod + def apply_settings(cls, project_settings): + # Preserve automatic settings applying logic + settings = get_plugin_settings(plugin=cls, + project_settings=project_settings, + log=cls.log, + category="houdini") + apply_plugin_settings_automatically(cls, settings, logger=cls.log) + + # Add review color settings + color_settings = project_settings["houdini"]["imageio"]["workfile"] + if color_settings["enabled"]: + cls.review_color_space = color_settings.get("review_color_space") + def process(self, instance): @@ -52,39 +72,54 @@ class ValidateReviewColorspace(pyblish.api.InstancePlugin, " 'OpenColorIO'".format(rop_node.path()) ) - if rop_node.evalParm("ociocolorspace") not in \ - hou.Color.ocio_spaces(): - + current_color_space = rop_node.evalParm("ociocolorspace") + if current_color_space not in hou.Color.ocio_spaces(): raise PublishValidationError( "Invalid value: Colorspace name 
doesn't exist.\n" "Check 'OCIO Colorspace' parameter on '{}' ROP" .format(rop_node.path()) ) - @classmethod - def repair(cls, instance): - """Set Default View Space Action. + # If houdini/imageio/workfile is enabled and the + # Review colorspace setting is empty, then this check should + # actually check whether current_color_space equals + # the default colorspace value. + # However, that would make the black cmd screen show up more often + # which is very annoying. + if self.review_color_space and \ + self.review_color_space != current_color_space: - It is a helper action more than a repair action, - used to set colorspace on opengl node to the default view. - """ - from ayon_core.hosts.houdini.api.colorspace import get_default_display_view_colorspace # noqa - - rop_node = hou.node(instance.data["instance_node"]) - - if rop_node.evalParm("colorcorrect") != 2: - rop_node.setParms({"colorcorrect": 2}) - cls.log.debug( - "'Color Correction' parm on '{}' has been set to" - " 'OpenColorIO'".format(rop_node.path()) + raise PublishValidationError( + "Invalid value: Colorspace name doesn't match" + " the colorspace specified in settings." ) - # Get default view colorspace name - default_view_space = get_default_display_view_colorspace() + @classmethod + def repair(cls, instance): + """Reset view colorspace. - rop_node.setParms({"ociocolorspace": default_view_space}) - cls.log.info( - "'OCIO Colorspace' parm on '{}' has been set to " - "the default view color space '{}'" - .format(rop_node, default_view_space) - ) + It is used to set the colorspace on the OpenGL node. + + It uses the colorspace value specified in the Houdini addon settings. + If the value in the Houdini addon settings is empty, + it falls back to the default colorspace. + + Note: + This repair action assumes that OCIO is enabled. + If OCIO is disabled, the whole validation is skipped + and this repair action won't show up. + """ + from ayon_core.hosts.houdini.api.lib import set_review_color_space + + # Fall back to the default value if cls.review_color_space is empty. + if not cls.review_color_space: + # cls.review_color_space is an empty string + # when the imageio/workfile setting is disabled or + # when the Review colorspace setting is empty. + from ayon_core.hosts.houdini.api.colorspace import get_default_display_view_colorspace # noqa + cls.review_color_space = get_default_display_view_colorspace() + + rop_node = hou.node(instance.data["instance_node"]) + set_review_color_space(rop_node, + cls.review_color_space, + cls.log) diff --git a/client/ayon_core/hosts/max/api/lib.py b/client/ayon_core/hosts/max/api/lib.py index 48bb15f538..ea17d1df05 100644 --- a/client/ayon_core/hosts/max/api/lib.py +++ b/client/ayon_core/hosts/max/api/lib.py @@ -8,10 +8,15 @@ from typing import Any, Dict, Union import six import ayon_api -from ayon_core.pipeline import get_current_project_name, colorspace +from ayon_core.pipeline import ( + get_current_project_name, + get_current_folder_path, + get_current_task_name, + colorspace +) from ayon_core.settings import get_project_settings from ayon_core.pipeline.context_tools import ( - get_current_folder_entity, + get_current_task_entity ) from ayon_core.style import load_stylesheet from pymxs import runtime as rt @@ -221,41 +226,30 @@ def reset_scene_resolution(): scene resolution can be overwritten by a folder if the folder.attrib contains any information regarding scene resolution.
""" - - folder_entity = get_current_folder_entity( - fields={"attrib.resolutionWidth", "attrib.resolutionHeight"} - ) - folder_attributes = folder_entity["attrib"] - width = int(folder_attributes["resolutionWidth"]) - height = int(folder_attributes["resolutionHeight"]) + task_attributes = get_current_task_entity(fields={"attrib"})["attrib"] + width = int(task_attributes["resolutionWidth"]) + height = int(task_attributes["resolutionHeight"]) set_scene_resolution(width, height) -def get_frame_range(folder_entiy=None) -> Union[Dict[str, Any], None]: - """Get the current folder frame range and handles. +def get_frame_range(task_entity=None) -> Union[Dict[str, Any], None]: + """Get the current task frame range and handles Args: - folder_entiy (dict): Folder eneity. + task_entity (dict): Task Entity. Returns: dict: with frame start, frame end, handle start, handle end. """ # Set frame start/end - if folder_entiy is None: - folder_entiy = get_current_folder_entity() - - folder_attributes = folder_entiy["attrib"] - frame_start = folder_attributes.get("frameStart") - frame_end = folder_attributes.get("frameEnd") - - if frame_start is None or frame_end is None: - return {} - - frame_start = int(frame_start) - frame_end = int(frame_end) - handle_start = int(folder_attributes.get("handleStart", 0)) - handle_end = int(folder_attributes.get("handleEnd", 0)) + if task_entity is None: + task_entity = get_current_task_entity(fields={"attrib"}) + task_attributes = task_entity["attrib"] + frame_start = int(task_attributes["frameStart"]) + frame_end = int(task_attributes["frameEnd"]) + handle_start = int(task_attributes["handleStart"]) + handle_end = int(task_attributes["handleEnd"]) frame_start_handle = frame_start - handle_start frame_end_handle = frame_end + handle_end @@ -281,9 +275,9 @@ def reset_frame_range(fps: bool = True): scene frame rate in frames-per-second. 
""" if fps: - project_name = get_current_project_name() - project_entity = ayon_api.get_project(project_name) - fps_number = float(project_entity["attrib"].get("fps")) + task_entity = get_current_task_entity() + task_attributes = task_entity["attrib"] + fps_number = float(task_attributes["fps"]) rt.frameRate = fps_number frame_range = get_frame_range() @@ -525,6 +519,36 @@ def get_plugins() -> list: return plugin_info_list +def update_modifier_node_names(event, node): + """Update the name of the nodes after renaming + + Args: + event (pymxs.MXSWrapperBase): Event Name ( + Mandatory argument for rt.NodeEventCallback) + node (list): Event Number ( + Mandatory argument for rt.NodeEventCallback) + + """ + containers = [ + obj + for obj in rt.Objects + if ( + rt.ClassOf(obj) == rt.Container + and rt.getUserProp(obj, "id") == "pyblish.avalon.instance" + and rt.getUserProp(obj, "productType") not in { + "workfile", "tyflow" + } + ) + ] + if not containers: + return + for container in containers: + ayon_data = container.modifiers[0].openPypeData + updated_node_names = [str(node.node) for node + in ayon_data.all_handles] + rt.setProperty(ayon_data, "sel_list", updated_node_names) + + @contextlib.contextmanager def render_resolution(width, height): """Set render resolution option during context diff --git a/client/ayon_core/hosts/max/api/pipeline.py b/client/ayon_core/hosts/max/api/pipeline.py index 675f36c24f..dc13f47795 100644 --- a/client/ayon_core/hosts/max/api/pipeline.py +++ b/client/ayon_core/hosts/max/api/pipeline.py @@ -63,6 +63,8 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): rt.callbacks.addScript(rt.Name('postWorkspaceChange'), self._deferred_menu_creation) + rt.NodeEventCallback( + nameChanged=lib.update_modifier_node_names) def workfile_has_unsaved_changes(self): return rt.getSaveRequired() diff --git a/client/ayon_core/hosts/max/plugins/publish/extract_alembic.py b/client/ayon_core/hosts/max/plugins/publish/extract_alembic.py index 67b5174200..67cec23ecc 100644 --- a/client/ayon_core/hosts/max/plugins/publish/extract_alembic.py +++ b/client/ayon_core/hosts/max/plugins/publish/extract_alembic.py @@ -53,6 +53,7 @@ class ExtractAlembic(publish.Extractor, hosts = ["max"] families = ["pointcache"] optional = True + active = True def process(self, instance): if not self.is_active(instance.data): @@ -102,24 +103,27 @@ class ExtractAlembic(publish.Extractor, @classmethod def get_attribute_defs(cls): - return [ + defs = super(ExtractAlembic, cls).get_attribute_defs() + defs.extend([ BoolDef("custom_attrs", label="Custom Attributes", default=False), - ] + ]) + return defs class ExtractCameraAlembic(ExtractAlembic): """Extract Camera with AlembicExport.""" - label = "Extract Alembic Camera" families = ["camera"] + optional = True -class ExtractModel(ExtractAlembic): +class ExtractModelAlembic(ExtractAlembic): """Extract Geometry in Alembic Format""" label = "Extract Geometry (Alembic)" families = ["model"] + optional = True def _set_abc_attributes(self, instance): attr_values = self.get_attr_values_from_data(instance.data) diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_frame_range.py b/client/ayon_core/hosts/max/plugins/publish/validate_frame_range.py index 2f4ec5f86c..11b55232d5 100644 --- a/client/ayon_core/hosts/max/plugins/publish/validate_frame_range.py +++ b/client/ayon_core/hosts/max/plugins/publish/validate_frame_range.py @@ -42,7 +42,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin, return frame_range = get_frame_range( - 
instance.data["folderEntity"]) + instance.data["taskEntity"]) inst_frame_start = instance.data.get("frameStartHandle") inst_frame_end = instance.data.get("frameEndHandle") diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py b/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py index cecfd5fd12..5107665235 100644 --- a/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py +++ b/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py @@ -38,7 +38,7 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin, context_label = "{} > {}".format(*context) instance_label = "{} > {}".format(folderPath, task) message = ( - "Instance '{}' publishes to different folder or task " + "Instance '{}' publishes to different context(folder or task) " "than current context: {}. Current context: {}".format( instance.name, instance_label, context_label ) @@ -46,7 +46,7 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin, raise PublishValidationError( message=message, description=( - "## Publishing to a different context folder or task\n" + "## Publishing to a different context data(folder or task)\n" "There are publish instances present which are publishing " "into a different folder path or task than your current context.\n\n" "Usually this is not what you want but there can be cases " diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_resolution_setting.py b/client/ayon_core/hosts/max/plugins/publish/validate_resolution_setting.py index f499f851f1..5f6cd0a21d 100644 --- a/client/ayon_core/hosts/max/plugins/publish/validate_resolution_setting.py +++ b/client/ayon_core/hosts/max/plugins/publish/validate_resolution_setting.py @@ -7,7 +7,10 @@ from ayon_core.pipeline.publish import ( RepairAction, PublishValidationError ) -from ayon_core.hosts.max.api.lib import reset_scene_resolution +from ayon_core.hosts.max.api.lib import ( + reset_scene_resolution, + imprint +) class ValidateResolutionSetting(pyblish.api.InstancePlugin, @@ -25,8 +28,10 @@ class ValidateResolutionSetting(pyblish.api.InstancePlugin, if not self.is_active(instance.data): return width, height = self.get_folder_resolution(instance) - current_width = rt.renderWidth - current_height = rt.renderHeight + current_width, current_height = ( + self.get_current_resolution(instance) + ) + if current_width != width and current_height != height: raise PublishValidationError("Resolution Setting " "not matching resolution " @@ -41,12 +46,16 @@ class ValidateResolutionSetting(pyblish.api.InstancePlugin, "not matching resolution set " "on asset or shot.") - def get_folder_resolution(self, instance): - folder_entity = instance.data["folderEntity"] - if folder_entity: - folder_attributes = folder_entity["attrib"] - width = folder_attributes["resolutionWidth"] - height = folder_attributes["resolutionHeight"] + def get_current_resolution(self, instance): + return rt.renderWidth, rt.renderHeight + + @classmethod + def get_folder_resolution(cls, instance): + task_entity = instance.data.get("taskEntity") + if task_entity: + task_attributes = task_entity["attrib"] + width = task_attributes["resolutionWidth"] + height = task_attributes["resolutionHeight"] return int(width), int(height) # Defaults if not found in folder entity @@ -55,3 +64,29 @@ class ValidateResolutionSetting(pyblish.api.InstancePlugin, @classmethod def repair(cls, instance): reset_scene_resolution() + + +class ValidateReviewResolutionSetting(ValidateResolutionSetting): + families = 
["review"] + optional = True + actions = [RepairAction] + + def get_current_resolution(self, instance): + current_width = instance.data["review_width"] + current_height = instance.data["review_height"] + return current_width, current_height + + @classmethod + def repair(cls, instance): + context_width, context_height = ( + cls.get_folder_resolution(instance) + ) + creator_attrs = instance.data["creator_attributes"] + creator_attrs["review_width"] = context_width + creator_attrs["review_height"] = context_height + creator_attrs_data = { + "creator_attributes": creator_attrs + } + # update the width and height of review + # data in creator_attributes + imprint(instance.data["instance_node"], creator_attrs_data) diff --git a/client/ayon_core/hosts/maya/api/fbx.py b/client/ayon_core/hosts/maya/api/fbx.py index 97e95d2ec4..939da4011b 100644 --- a/client/ayon_core/hosts/maya/api/fbx.py +++ b/client/ayon_core/hosts/maya/api/fbx.py @@ -2,8 +2,6 @@ """Tools to work with FBX.""" import logging -from pyblish.api import Instance - from maya import cmds # noqa import maya.mel as mel # noqa from ayon_core.hosts.maya.api.lib import maintained_selection @@ -146,7 +144,6 @@ class FBXExtractor: return options def set_options_from_instance(self, instance): - # type: (Instance) -> None """Sets FBX export options from data in the instance. Args: diff --git a/client/ayon_core/hosts/maya/api/lib.py b/client/ayon_core/hosts/maya/api/lib.py index ff5bee03ca..321bcbc0b5 100644 --- a/client/ayon_core/hosts/maya/api/lib.py +++ b/client/ayon_core/hosts/maya/api/lib.py @@ -1917,6 +1917,29 @@ def apply_attributes(attributes, nodes_by_id): set_attribute(attr, value, node) +def is_valid_reference_node(reference_node): + """Return whether Maya considers the reference node a valid reference. + + Maya might report an error when using `maya.cmds.referenceQuery`: + Reference node 'reference_node' is not associated with a reference file. + + Note that this does *not* check whether the reference node points to an + existing file. Instead it only returns whether maya considers it valid + and thus is not an unassociated reference node + + Arguments: + reference_node (str): Reference node name + + Returns: + bool: Whether reference node is a valid reference + + """ + sel = OpenMaya.MSelectionList() + sel.add(reference_node) + depend_node = sel.getDependNode(0) + return OpenMaya.MFnReference(depend_node).isValidReference() + + def get_container_members(container): """Returns the members of a container. This includes the nodes from any loaded references in the container. @@ -1942,7 +1965,16 @@ def get_container_members(container): if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"): continue - reference_members = cmds.referenceQuery(ref, nodes=True, dagPath=True) + try: + reference_members = cmds.referenceQuery(ref, + nodes=True, + dagPath=True) + except RuntimeError: + # Ignore reference nodes that are not associated with a + # referenced file on which `referenceQuery` command fails + if not is_valid_reference_node(ref): + continue + raise reference_members = cmds.ls(reference_members, long=True, objectsOnly=True) @@ -4238,6 +4270,9 @@ def get_reference_node(members, log=None): if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"): continue + if not is_valid_reference_node(ref): + continue + references.add(ref) assert references, "No reference node found in container" @@ -4268,15 +4303,19 @@ def get_reference_node_parents(ref): list: The upstream parent reference nodes. 
""" - parent = cmds.referenceQuery(ref, - referenceNode=True, - parent=True) + def _get_parent(reference_node): + """Return parent reference node, but ignore invalid reference nodes""" + if not is_valid_reference_node(reference_node): + return + return cmds.referenceQuery(reference_node, + referenceNode=True, + parent=True) + + parent = _get_parent(ref) parents = [] while parent: parents.append(parent) - parent = cmds.referenceQuery(parent, - referenceNode=True, - parent=True) + parent = _get_parent(parent) return parents diff --git a/client/ayon_core/hosts/maya/api/lib_renderproducts.py b/client/ayon_core/hosts/maya/api/lib_renderproducts.py index 832d1c21c2..52c282c6de 100644 --- a/client/ayon_core/hosts/maya/api/lib_renderproducts.py +++ b/client/ayon_core/hosts/maya/api/lib_renderproducts.py @@ -720,7 +720,8 @@ class RenderProductsArnold(ARenderProducts): # AOVs > Legacy > Maya Render View > Mode aovs_enabled = bool( - self._get_attr("defaultArnoldRenderOptions.aovMode") + self._get_attr( + "defaultArnoldRenderOptions.aovMode", as_string=False) ) if not aovs_enabled: return beauty_products diff --git a/client/ayon_core/hosts/maya/api/render_setup_tools.py b/client/ayon_core/hosts/maya/api/render_setup_tools.py index a5e04de184..9b00b53eee 100644 --- a/client/ayon_core/hosts/maya/api/render_setup_tools.py +++ b/client/ayon_core/hosts/maya/api/render_setup_tools.py @@ -19,7 +19,7 @@ from .lib import pairwise @contextlib.contextmanager -def _allow_export_from_render_setup_layer(): +def allow_export_from_render_setup_layer(): """Context manager to override Maya settings to allow RS layer export""" try: @@ -102,7 +102,7 @@ def export_in_rs_layer(path, nodes, export=None): cmds.disconnectAttr(src, dest) # Export Selected - with _allow_export_from_render_setup_layer(): + with allow_export_from_render_setup_layer(): cmds.select(nodes, noExpand=True) if export: export() diff --git a/client/ayon_core/hosts/maya/api/workfile_template_builder.py b/client/ayon_core/hosts/maya/api/workfile_template_builder.py index 75386d7e64..ddf19125e3 100644 --- a/client/ayon_core/hosts/maya/api/workfile_template_builder.py +++ b/client/ayon_core/hosts/maya/api/workfile_template_builder.py @@ -331,7 +331,8 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): if scene_parent: cmds.parent(node, scene_parent) else: - cmds.parent(node, world=True) + if cmds.listRelatives(node, parent=True): + cmds.parent(node, world=True) holding_sets = cmds.listSets(object=placeholder.scene_identifier) if not holding_sets: diff --git a/client/ayon_core/hosts/maya/api/yeti.py b/client/ayon_core/hosts/maya/api/yeti.py new file mode 100644 index 0000000000..1526c3a2f3 --- /dev/null +++ b/client/ayon_core/hosts/maya/api/yeti.py @@ -0,0 +1,101 @@ +from typing import List + +from maya import cmds + + +def get_yeti_user_variables(yeti_shape_node: str) -> List[str]: + """Get user defined yeti user variables for a `pgYetiMaya` shape node. + + Arguments: + yeti_shape_node (str): The `pgYetiMaya` shape node. 
+ + Returns: + list: Attribute names (for a vector attribute it only lists the top + parent attribute, not the attribute per axis) + """ + + attrs = cmds.listAttr(yeti_shape_node, + userDefined=True, + string=("yetiVariableV_*", + "yetiVariableF_*")) or [] + valid_attrs = [] + for attr in attrs: + attr_type = cmds.attributeQuery(attr, node=yeti_shape_node, + attributeType=True) + if attr.startswith("yetiVariableV_") and attr_type == "double3": + # vector + valid_attrs.append(attr) + elif attr.startswith("yetiVariableF_") and attr_type == "double": + valid_attrs.append(attr) + + return valid_attrs + + +def create_yeti_variable(yeti_shape_node: str, + attr_name: str, + value=None, + force_value: bool = False) -> bool: + """Get user defined yeti user variables for a `pgYetiMaya` shape node. + + Arguments: + yeti_shape_node (str): The `pgYetiMaya` shape node. + attr_name (str): The fully qualified yeti variable name, e.g. + "yetiVariableF_myfloat" or "yetiVariableV_myvector" + value (object): The value to set (must match the type of the attribute) + When value is None it will ignored and not be set. + force_value (bool): Whether to set the value if the attribute already + exists or not. + + Returns: + bool: Whether the attribute value was set or not. + + """ + exists = cmds.attributeQuery(attr_name, node=yeti_shape_node, exists=True) + if not exists: + if attr_name.startswith("yetiVariableV_"): + _create_vector_yeti_user_variable(yeti_shape_node, attr_name) + if attr_name.startswith("yetiVariableF_"): + _create_float_yeti_user_variable(yeti_shape_node, attr_name) + + if value is not None and (not exists or force_value): + plug = "{}.{}".format(yeti_shape_node, attr_name) + if ( + isinstance(value, (list, tuple)) + and attr_name.startswith("yetiVariableV_") + ): + cmds.setAttr(plug, *value, type="double3") + else: + cmds.setAttr(plug, value) + + return True + return False + + +def _create_vector_yeti_user_variable(yeti_shape_node: str, attr_name: str): + if not attr_name.startswith("yetiVariableV_"): + raise ValueError("Must start with yetiVariableV_") + cmds.addAttr(yeti_shape_node, + longName=attr_name, + attributeType="double3", + cachedInternally=True, + keyable=True) + for axis in "XYZ": + cmds.addAttr(yeti_shape_node, + longName="{}{}".format(attr_name, axis), + attributeType="double", + parent=attr_name, + cachedInternally=True, + keyable=True) + + +def _create_float_yeti_user_variable(yeti_node: str, attr_name: str): + if not attr_name.startswith("yetiVariableF_"): + raise ValueError("Must start with yetiVariableF_") + + cmds.addAttr(yeti_node, + longName=attr_name, + attributeType="double", + cachedInternally=True, + softMinValue=0, + softMaxValue=100, + keyable=True) diff --git a/client/ayon_core/hosts/maya/plugins/inventory/connect_geometry.py b/client/ayon_core/hosts/maya/plugins/inventory/connect_geometry.py index 839a4dad90..5410546a2e 100644 --- a/client/ayon_core/hosts/maya/plugins/inventory/connect_geometry.py +++ b/client/ayon_core/hosts/maya/plugins/inventory/connect_geometry.py @@ -37,7 +37,7 @@ class ConnectGeometry(InventoryAction): repre_id = container["representation"] repre_context = repre_contexts_by_id[repre_id] - product_type = repre_context["prouct"]["productType"] + product_type = repre_context["product"]["productType"] containers_by_product_type.setdefault(product_type, []) containers_by_product_type[product_type].append(container) diff --git a/client/ayon_core/hosts/maya/plugins/inventory/connect_xgen.py 
b/client/ayon_core/hosts/maya/plugins/inventory/connect_xgen.py index bf9e679928..166c419072 100644 --- a/client/ayon_core/hosts/maya/plugins/inventory/connect_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/inventory/connect_xgen.py @@ -36,7 +36,7 @@ class ConnectXgen(InventoryAction): repre_id = container["representation"] repre_context = repre_contexts_by_id[repre_id] - product_type = repre_context["prouct"]["productType"] + product_type = repre_context["product"]["productType"] containers_by_product_type.setdefault(product_type, []) containers_by_product_type[product_type].append(container) diff --git a/client/ayon_core/hosts/maya/plugins/inventory/connect_yeti_rig.py b/client/ayon_core/hosts/maya/plugins/inventory/connect_yeti_rig.py index 5916bf7b97..8f13cc6ae5 100644 --- a/client/ayon_core/hosts/maya/plugins/inventory/connect_yeti_rig.py +++ b/client/ayon_core/hosts/maya/plugins/inventory/connect_yeti_rig.py @@ -39,7 +39,7 @@ class ConnectYetiRig(InventoryAction): repre_id = container["representation"] repre_context = repre_contexts_by_id[repre_id] - product_type = repre_context["prouct"]["productType"] + product_type = repre_context["product"]["productType"] containers_by_product_type.setdefault(product_type, []) containers_by_product_type[product_type].append(container) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_as_template.py b/client/ayon_core/hosts/maya/plugins/load/load_as_template.py new file mode 100644 index 0000000000..f696d369e3 --- /dev/null +++ b/client/ayon_core/hosts/maya/plugins/load/load_as_template.py @@ -0,0 +1,39 @@ +from ayon_core.lib import ( + BoolDef +) +from ayon_core.pipeline import ( + load, + registered_host +) +from ayon_core.hosts.maya.api.workfile_template_builder import ( + MayaTemplateBuilder +) + + +class LoadAsTemplate(load.LoaderPlugin): + """Load workfile as a template """ + + product_types = {"workfile", "mayaScene"} + label = "Load as template" + representations = ["ma", "mb"] + icon = "wrench" + color = "#775555" + order = 10 + + options = [ + BoolDef("keep_placeholders", + label="Keep Placeholders", + default=False), + BoolDef("create_first_version", + label="Create First Version", + default=False), + ] + + def load(self, context, name, namespace, data): + keep_placeholders = data.get("keep_placeholders", False) + create_first_version = data.get("create_first_version", False) + path = self.filepath_from_context(context) + builder = MayaTemplateBuilder(registered_host()) + builder.build_template(template_path=path, + keep_placeholders=keep_placeholders, + create_first_version=create_first_version) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py b/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py index caea6b7a72..4ca9ae9d03 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py @@ -12,6 +12,7 @@ from ayon_core.pipeline import ( get_representation_path ) from ayon_core.hosts.maya.api import lib +from ayon_core.hosts.maya.api.yeti import create_yeti_variable from ayon_core.hosts.maya.api.pipeline import containerise from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type @@ -23,8 +24,19 @@ SKIP_UPDATE_ATTRS = { "viewportDensity", "viewportWidth", "viewportLength", + "renderDensity", + "renderWidth", + "renderLength", + "increaseRenderBounds" } +SKIP_ATTR_MESSAGE = ( + "Skipping updating %s.%s to %s because it " + "is considered a local overridable attribute. 
" + "Either set manually or the load the cache " + "anew." +) + def set_attribute(node, attr, value): """Wrapper of set attribute which ignores None values""" @@ -209,9 +221,31 @@ class YetiCacheLoader(load.LoaderPlugin): for attr, value in node_settings["attrs"].items(): if attr in SKIP_UPDATE_ATTRS: + self.log.info( + SKIP_ATTR_MESSAGE, yeti_node, attr, value + ) continue set_attribute(attr, value, yeti_node) + # Set up user defined attributes + user_variables = node_settings.get("user_variables", {}) + for attr, value in user_variables.items(): + was_value_set = create_yeti_variable( + yeti_shape_node=yeti_node, + attr_name=attr, + value=value, + # We do not want to update the + # value if it already exists so + # that any local overrides that + # may have been applied still + # persist + force_value=False + ) + if not was_value_set: + self.log.info( + SKIP_ATTR_MESSAGE, yeti_node, attr, value + ) + cmds.setAttr("{}.representation".format(container_node), repre_entity["id"], typ="string") @@ -332,6 +366,13 @@ class YetiCacheLoader(load.LoaderPlugin): for attr, value in attributes.items(): set_attribute(attr, value, yeti_node) + # Set up user defined attributes + user_variables = node_settings.get("user_variables", {}) + for attr, value in user_variables.items(): + create_yeti_variable(yeti_shape_node=yeti_node, + attr_name=attr, + value=value) + # Connect to the time node cmds.connectAttr("time1.outTime", "%s.currentTime" % yeti_node) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py b/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py index bf9525bae3..7444566ee1 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py @@ -1,8 +1,13 @@ +from typing import List + import maya.cmds as cmds from ayon_core.hosts.maya.api import plugin from ayon_core.hosts.maya.api import lib +from ayon_core.pipeline import registered_host +from ayon_core.pipeline.create import CreateContext + class YetiRigLoader(plugin.ReferenceLoader): """This loader will load Yeti rig.""" @@ -15,6 +20,9 @@ class YetiRigLoader(plugin.ReferenceLoader): icon = "code-fork" color = "orange" + # From settings + create_cache_instance_on_load = True + def process_reference( self, context, name=None, namespace=None, options=None ): @@ -49,4 +57,41 @@ class YetiRigLoader(plugin.ReferenceLoader): ) self[:] = nodes + if self.create_cache_instance_on_load: + # Automatically create in instance to allow publishing the loaded + # yeti rig into a yeti cache + self._create_yeti_cache_instance(nodes, variant=namespace) + return nodes + + def _create_yeti_cache_instance(self, nodes: List[str], variant: str): + """Create a yeticache product type instance to publish the output. + + This is similar to how loading animation rig will automatically create + an animation instance for publishing any loaded character rigs, but + then for yeti rigs. + + Args: + nodes (List[str]): Nodes generated on load. + variant (str): Variant for the yeti cache instance to create. + + """ + + # Find the roots amongst the loaded nodes + yeti_nodes = cmds.ls(nodes, type="pgYetiMaya", long=True) + assert yeti_nodes, "No pgYetiMaya nodes in rig, this is a bug." 
+ + self.log.info("Creating variant: {}".format(variant)) + + creator_identifier = "io.openpype.creators.maya.yeticache" + + host = registered_host() + create_context = CreateContext(host) + + with lib.maintained_selection(): + cmds.select(yeti_nodes, noExpand=True) + create_context.create( + creator_identifier=creator_identifier, + variant=variant, + pre_create_data={"use_selection": True} + ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py b/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py index 93b46c511b..60853bd1ee 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py @@ -12,7 +12,7 @@ class CollectFileDependencies(pyblish.api.ContextPlugin): families = ["renderlayer"] @classmethod - def apply_settings(cls, project_settings, system_settings): + def apply_settings(cls, project_settings): # Disable plug-in if not used for deadline submission anyway settings = project_settings["deadline"]["publish"]["MayaSubmitDeadline"] # noqa cls.enabled = settings.get("asset_dependencies", True) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_yeti_cache.py b/client/ayon_core/hosts/maya/plugins/publish/collect_yeti_cache.py index 067a7bc532..e1755e4212 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_yeti_cache.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_yeti_cache.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api from ayon_core.hosts.maya.api import lib +from ayon_core.hosts.maya.api.yeti import get_yeti_user_variables SETTINGS = { @@ -34,7 +35,7 @@ class CollectYetiCache(pyblish.api.InstancePlugin): - "increaseRenderBounds" - "imageSearchPath" - Other information is the name of the transform and it's Colorbleed ID + Other information is the name of the transform and its `cbId` """ order = pyblish.api.CollectorOrder + 0.45 @@ -54,6 +55,16 @@ class CollectYetiCache(pyblish.api.InstancePlugin): # Get specific node attributes attr_data = {} for attr in SETTINGS: + # Ignore non-existing attributes with a warning, e.g. 
cbId + # if they have not been generated yet + if not cmds.attributeQuery(attr, node=shape, exists=True): + self.log.warning( + "Attribute '{}' not found on Yeti node: {}".format( + attr, shape + ) + ) + continue + current = cmds.getAttr("%s.%s" % (shape, attr)) # change None to empty string as Maya doesn't support # NoneType in attributes @@ -61,6 +72,12 @@ class CollectYetiCache(pyblish.api.InstancePlugin): current = "" attr_data[attr] = current + # Get user variable attributes + user_variable_attrs = { + attr: lib.get_attribute("{}.{}".format(shape, attr)) + for attr in get_yeti_user_variables(shape) + } + # Get transform data parent = cmds.listRelatives(shape, parent=True)[0] transform_data = {"name": parent, "cbId": lib.get_id(parent)} @@ -70,6 +87,7 @@ class CollectYetiCache(pyblish.api.InstancePlugin): "name": shape, "cbId": lib.get_id(shape), "attrs": attr_data, + "user_variables": user_variable_attrs } settings["nodes"].append(shape_data) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py b/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py index c4af2914cd..cb3951ec0c 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py @@ -299,4 +299,10 @@ def transfer_image_planes(source_cameras, target_cameras, def _attach_image_plane(camera, image_plane): cmds.imagePlane(image_plane, edit=True, detach=True) + + # Attaching to a camera resets it to identity size, so we counter that + size_x = cmds.getAttr(f"{image_plane}.sizeX") + size_y = cmds.getAttr(f"{image_plane}.sizeY") cmds.imagePlane(image_plane, edit=True, camera=camera) + cmds.setAttr(f"{image_plane}.sizeX", size_x) + cmds.setAttr(f"{image_plane}.sizeY", size_y) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_redshift_proxy.py b/client/ayon_core/hosts/maya/plugins/publish/extract_redshift_proxy.py index 9286869c60..66dd805437 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_redshift_proxy.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_redshift_proxy.py @@ -5,7 +5,13 @@ import os from maya import cmds from ayon_core.pipeline import publish -from ayon_core.hosts.maya.api.lib import maintained_selection +from ayon_core.hosts.maya.api.lib import ( + maintained_selection, + renderlayer +) +from ayon_core.hosts.maya.api.render_setup_tools import ( + allow_export_from_render_setup_layer +) class ExtractRedshiftProxy(publish.Extractor): @@ -18,6 +24,9 @@ class ExtractRedshiftProxy(publish.Extractor): def process(self, instance): """Extractor entry point.""" + # Make sure Redshift is loaded + cmds.loadPlugin("redshift4maya", quiet=True) + staging_dir = self.staging_dir(instance) file_name = "{}.rs".format(instance.name) file_path = os.path.join(staging_dir, file_name) @@ -60,14 +69,22 @@ class ExtractRedshiftProxy(publish.Extractor): # Write out rs file self.log.debug("Writing: '%s'" % file_path) + + # Allow overriding what renderlayer to export from. By default force + # it to the default render layer. 
(Note that the renderlayer isn't + # currently exposed as an attribute to artists) + layer = instance.data.get("renderLayer", "defaultRenderLayer") + with maintained_selection(): - cmds.select(instance.data["setMembers"], noExpand=True) - cmds.file(file_path, - pr=False, - force=True, - type="Redshift Proxy", - exportSelected=True, - options=rs_options) + with renderlayer(layer): + with allow_export_from_render_setup_layer(): + cmds.select(instance.data["setMembers"], noExpand=True) + cmds.file(file_path, + preserveReferences=False, + force=True, + type="Redshift Proxy", + exportSelected=True, + options=rs_options) if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py b/client/ayon_core/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py index d8a9222c36..cfd4156124 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py @@ -2,7 +2,6 @@ from maya import cmds import pyblish.api from ayon_core.pipeline.publish import ( - ValidateContentsOrder, RepairContextAction, PublishValidationError ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py index d1d7e49fa4..58d015e962 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py @@ -45,6 +45,11 @@ class ValidateMeshNgons(pyblish.api.InstancePlugin, # Get all faces faces = ['{0}.f[*]'.format(node) for node in meshes] + # Skip meshes that for some reason have no faces, e.g. empty meshes + faces = cmds.ls(faces) + if not faces: + return [] + # Filter to n-sided polygon faces (ngons) invalid = lib.polyConstraint(faces, t=0x0008, # type=face diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py index a139b65169..305a58d78e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py @@ -1,3 +1,5 @@ +import inspect + from maya import cmds import pyblish.api @@ -29,8 +31,8 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction, RepairAction] - @staticmethod - def get_invalid(instance): + @classmethod + def get_invalid(cls, instance): meshes = cmds.ls(instance, type='mesh', long=True) @@ -40,6 +42,11 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, # Get existing mapping of uv sets by index indices = cmds.polyUVSet(mesh, query=True, allUVSetsIndices=True) maps = cmds.polyUVSet(mesh, query=True, allUVSets=True) + if not indices or not maps: + cls.log.warning("Mesh has no UV set: %s", mesh) + invalid.append(mesh) + continue + mapping = dict(zip(indices, maps)) # Get the uv set at index zero. 
@@ -56,8 +63,14 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, invalid = self.get_invalid(instance) if invalid: + + invalid_list = "\n".join(f"- {node}" for node in invalid) + raise PublishValidationError( - "Meshes found without 'map1' UV set: {0}".format(invalid)) + "Meshes found without 'map1' UV set:\n" + "{0}".format(invalid_list), + description=self.get_description() + ) @classmethod def repair(cls, instance): @@ -68,6 +81,12 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, # Get existing mapping of uv sets by index indices = cmds.polyUVSet(mesh, query=True, allUVSetsIndices=True) maps = cmds.polyUVSet(mesh, query=True, allUVSets=True) + if not indices or not maps: + # No UV set exist at all, create a `map1` uv set + # This may fail silently if the mesh has no geometry at all + cmds.polyUVSet(mesh, create=True, uvSet="map1") + continue + mapping = dict(zip(indices, maps)) # Ensure there is no uv set named map1 to avoid @@ -97,3 +116,23 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, rename=True, uvSet=original, newUVSet="map1") + + @staticmethod + def get_description(): + return inspect.cleandoc("""### Mesh found without map1 uv set + + A mesh must have a default UV set named `map1` to adhere to the default + mesh behavior of Maya meshes. + + There may be meshes that: + - Have no UV set + - Have no `map1` uv set but are using a different name + - Have a `map1` uv set, but it's not the default (first index) + + + #### Repair + + Using repair will try to make the first UV set the `map1` uv set. If it + does not exist yet it will be created or renames the current first + UV set to `map1`. + """) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_related.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_related.py index 992988dc7d..17eb58f421 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_related.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_related.py @@ -1,17 +1,27 @@ +import inspect +import uuid +from collections import defaultdict import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( OptionalPyblishPluginMixin, PublishValidationError, ValidatePipelineOrder) +from ayon_api import get_folders + + +def is_valid_uuid(value) -> bool: + """Return whether value is a valid UUID""" + try: + uuid.UUID(value) + except ValueError: + return False + return True class ValidateNodeIDsRelated(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): - """Validate nodes have a related Colorbleed Id to the - instance.data[folderPath] - - """ + """Validate nodes have a related `cbId` to the instance.data[folderPath]""" order = ValidatePipelineOrder label = 'Node Ids Related (ID)' @@ -39,21 +49,24 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin, # Ensure all nodes have a cbId invalid = self.get_invalid(instance) if invalid: + + invalid_list = "\n".join(f"- {node}" for node in sorted(invalid)) + raise PublishValidationError(( - "Nodes IDs found that are not related to folder '{}' : {}" - ).format( - instance.data["folderPath"], invalid - )) + "Nodes IDs found that are not related to folder '{}':\n{}" + ).format(instance.data["folderPath"], invalid_list), + description=self.get_description() + ) @classmethod def get_invalid(cls, instance): """Return the member nodes that are invalid""" - invalid = list() - folder_id = instance.data["folderEntity"]["id"] - # We do want to check the 
referenced nodes as we it might be + # We do want to check the referenced nodes as it might be # part of the end product + invalid = list() + nodes_by_other_folder_ids = defaultdict(set) for node in instance: _id = lib.get_id(node) if not _id: @@ -62,5 +75,48 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin, node_folder_id = _id.split(":", 1)[0] if node_folder_id != folder_id: invalid.append(node) + nodes_by_other_folder_ids[node_folder_id].add(node) + + # Log what other assets were found. + if nodes_by_other_folder_ids: + project_name = instance.context.data["projectName"] + other_folder_ids = set(nodes_by_other_folder_ids.keys()) + + # Remove folder ids that are not valid UUID identifiers, these + # may be legacy OpenPype ids + other_folder_ids = {folder_id for folder_id in other_folder_ids + if is_valid_uuid(folder_id)} + if not other_folder_ids: + return invalid + + folder_entities = get_folders(project_name=project_name, + folder_ids=other_folder_ids, + fields=["path"]) + if folder_entities: + # Log names of other assets detected + # We disregard logging nodes/ids for asset ids where no asset + # was found in the database because ValidateNodeIdsInDatabase + # takes care of that. + folder_paths = {entity["path"] for entity in folder_entities} + cls.log.error( + "Found nodes related to other folders:\n{}".format( + "\n".join(f"- {path}" for path in sorted(folder_paths)) + ) + ) return invalid + + @staticmethod + def get_description(): + return inspect.cleandoc("""### Node IDs must match folder id + + The node ids must match the folder entity id you are publishing to. + + Usually these mismatch occurs if you are re-using nodes from another + folder or project. + + #### How to repair? + + The repair action will regenerate new ids for + the invalid nodes to match the instance's folder. + """) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_scene_set_workspace.py b/client/ayon_core/hosts/maya/plugins/publish/validate_scene_set_workspace.py index 6e68cf5d14..c7d5de2050 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_scene_set_workspace.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_scene_set_workspace.py @@ -46,6 +46,6 @@ class ValidateSceneSetWorkspace(pyblish.api.ContextPlugin): raise PublishValidationError( "Maya workspace is not set correctly.\n\n" f"Current workfile `{scene_name}` is not inside the " - "current Maya project root directory `{root_dir}`.\n\n" + f"current Maya project root directory `{root_dir}`.\n\n" "Please use Workfile app to re-save." 
) diff --git a/client/ayon_core/hosts/nuke/api/plugin.py b/client/ayon_core/hosts/nuke/api/plugin.py index 56c30a8ff5..fb56dec833 100644 --- a/client/ayon_core/hosts/nuke/api/plugin.py +++ b/client/ayon_core/hosts/nuke/api/plugin.py @@ -5,7 +5,7 @@ import sys import six import random import string -from collections import OrderedDict, defaultdict +from collections import defaultdict from ayon_core.settings import get_current_project_settings from ayon_core.lib import ( diff --git a/client/ayon_core/hosts/substancepainter/api/lib.py b/client/ayon_core/hosts/substancepainter/api/lib.py index 1cb480b552..64c39943ce 100644 --- a/client/ayon_core/hosts/substancepainter/api/lib.py +++ b/client/ayon_core/hosts/substancepainter/api/lib.py @@ -586,7 +586,6 @@ def prompt_new_file_with_mesh(mesh_filepath): # TODO: find a way to improve the process event to # load more complicated mesh app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 3000) - file_dialog.done(file_dialog.Accepted) app.processEvents(QtCore.QEventLoop.AllEvents) @@ -606,7 +605,7 @@ def prompt_new_file_with_mesh(mesh_filepath): mesh_select.setVisible(False) # Ensure UI is visually up-to-date - app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents) + app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 8000) # Trigger the 'select file' dialog to set the path and have the # new file dialog to use the path. @@ -623,8 +622,6 @@ def prompt_new_file_with_mesh(mesh_filepath): "Failed to set mesh path with the prompt dialog:" f"{mesh_filepath}\n\n" "Creating new project directly with the mesh path instead.") - else: - dialog.done(dialog.Accepted) new_action = _get_new_project_action() if not new_action: diff --git a/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py b/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py index f204ff7728..f46afadb5a 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py +++ b/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py @@ -144,7 +144,8 @@ class CreateTextures(Creator): 9: "512", 10: "1024", 11: "2048", - 12: "4096" + 12: "4096", + 13: "8192" }, default=None, label="Size"), diff --git a/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py b/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py index 01cb65dd5c..d5aac1191c 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py +++ b/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py @@ -1,3 +1,5 @@ +import copy +from qtpy import QtWidgets, QtCore from ayon_core.pipeline import ( load, get_representation_path, @@ -8,10 +10,133 @@ from ayon_core.hosts.substancepainter.api.pipeline import ( set_container_metadata, remove_container_metadata ) -from ayon_core.hosts.substancepainter.api.lib import prompt_new_file_with_mesh import substance_painter.project -import qargparse + + +def _convert(substance_attr): + """Return Substance Painter Python API Project attribute from string. + + This converts a string like "ProjectWorkflow.Default" to for example + the Substance Painter Python API equivalent object, like: + `substance_painter.project.ProjectWorkflow.Default` + + Args: + substance_attr (str): The `substance_painter.project` attribute, + for example "ProjectWorkflow.Default" + + Returns: + Any: Substance Python API object of the project attribute. + + Raises: + ValueError: If attribute does not exist on the + `substance_painter.project` python api. 
+    """
+    root = substance_painter.project
+    for attr in substance_attr.split("."):
+        root = getattr(root, attr, None)
+        if root is None:
+            raise ValueError(
+                "Substance Painter project attribute"
+                f" does not exist: {substance_attr}")
+
+    return root
+
+
+def get_template_by_name(name: str, templates: list[dict]) -> dict:
+    return next(
+        template for template in templates
+        if template["name"] == name
+    )
+
+
+class SubstanceProjectConfigurationWindow(QtWidgets.QDialog):
+    """Dialog that lets the user pick a project template and the
+    import cameras / preserve strokes options used when creating or
+    reloading a Substance Painter project.
+    """
+    def __init__(self, project_templates):
+        super(SubstanceProjectConfigurationWindow, self).__init__()
+        self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint)
+
+        self.configuration = None
+        self.template_names = [template["name"] for template
+                               in project_templates]
+        self.project_templates = project_templates
+
+        self.widgets = {
+            "label": QtWidgets.QLabel(
+                "Select your template for project configuration"),
+            "template_options": QtWidgets.QComboBox(),
+            "import_cameras": QtWidgets.QCheckBox("Import Cameras"),
+            "preserve_strokes": QtWidgets.QCheckBox("Preserve Strokes"),
+            "clickbox": QtWidgets.QWidget(),
+            "combobox": QtWidgets.QWidget(),
+            "buttons": QtWidgets.QDialogButtonBox(
+                QtWidgets.QDialogButtonBox.Ok
+                | QtWidgets.QDialogButtonBox.Cancel)
+        }
+
+        self.widgets["template_options"].addItems(self.template_names)
+
+        template_name = self.widgets["template_options"].currentText()
+        self._update_to_match_template(template_name)
+        # Build clickboxes
+        layout = QtWidgets.QHBoxLayout(self.widgets["clickbox"])
+        layout.addWidget(self.widgets["import_cameras"])
+        layout.addWidget(self.widgets["preserve_strokes"])
+        # Build combobox
+        layout = QtWidgets.QHBoxLayout(self.widgets["combobox"])
+        layout.addWidget(self.widgets["template_options"])
+        # Build buttons
+        layout = QtWidgets.QHBoxLayout(self.widgets["buttons"])
+        # Build layout.
+ layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(self.widgets["label"]) + layout.addWidget(self.widgets["combobox"]) + layout.addWidget(self.widgets["clickbox"]) + layout.addWidget(self.widgets["buttons"]) + + self.widgets["template_options"].currentTextChanged.connect( + self._update_to_match_template) + self.widgets["buttons"].accepted.connect(self.on_accept) + self.widgets["buttons"].rejected.connect(self.on_reject) + + def on_accept(self): + self.configuration = self.get_project_configuration() + self.close() + + def on_reject(self): + self.close() + + def _update_to_match_template(self, template_name): + template = get_template_by_name(template_name, self.project_templates) + self.widgets["import_cameras"].setChecked(template["import_cameras"]) + self.widgets["preserve_strokes"].setChecked( + template["preserve_strokes"]) + + def get_project_configuration(self): + templates = self.project_templates + template_name = self.widgets["template_options"].currentText() + template = get_template_by_name(template_name, templates) + template = copy.deepcopy(template) # do not edit the original + template["import_cameras"] = self.widgets["import_cameras"].isChecked() + template["preserve_strokes"] = ( + self.widgets["preserve_strokes"].isChecked() + ) + for key in ["normal_map_format", + "project_workflow", + "tangent_space_mode"]: + template[key] = _convert(template[key]) + return template + + @classmethod + def prompt(cls, templates): + dialog = cls(templates) + dialog.exec_() + configuration = dialog.configuration + dialog.deleteLater() + return configuration class SubstanceLoadProjectMesh(load.LoaderPlugin): @@ -25,48 +150,35 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): icon = "code-fork" color = "orange" - options = [ - qargparse.Boolean( - "preserve_strokes", - default=True, - help="Preserve strokes positions on mesh.\n" - "(only relevant when loading into existing project)" - ), - qargparse.Boolean( - "import_cameras", - default=True, - help="Import cameras from the mesh file." - ) - ] + # Defined via settings + project_templates = [] - def load(self, context, name, namespace, data): + def load(self, context, name, namespace, options=None): # Get user inputs - import_cameras = data.get("import_cameras", True) - preserve_strokes = data.get("preserve_strokes", True) - sp_settings = substance_painter.project.Settings( - import_cameras=import_cameras - ) + result = SubstanceProjectConfigurationWindow.prompt( + self.project_templates) + if not result: + # cancelling loader action + return if not substance_painter.project.is_open(): # Allow to 'initialize' a new project path = self.filepath_from_context(context) - # TODO: improve the prompt dialog function to not - # only works for simple polygon scene - result = prompt_new_file_with_mesh(mesh_filepath=path) - if not result: - self.log.info("User cancelled new project prompt." 
- "Creating new project directly from" - " Substance Painter API Instead.") - settings = substance_painter.project.create( - mesh_file_path=path, settings=sp_settings - ) - + sp_settings = substance_painter.project.Settings( + import_cameras=result["import_cameras"], + normal_map_format=result["normal_map_format"], + project_workflow=result["project_workflow"], + tangent_space_mode=result["tangent_space_mode"], + default_texture_resolution=result["default_texture_resolution"] + ) + settings = substance_painter.project.create( + mesh_file_path=path, settings=sp_settings + ) else: # Reload the mesh settings = substance_painter.project.MeshReloadingSettings( - import_cameras=import_cameras, - preserve_strokes=preserve_strokes - ) + import_cameras=result["import_cameras"], + preserve_strokes=result["preserve_strokes"]) def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus): # noqa if status == substance_painter.project.ReloadMeshStatus.SUCCESS: # noqa @@ -92,7 +204,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): # from the user's original choice. We don't store 'preserve_strokes' # as we always preserve strokes on updates. container["options"] = { - "import_cameras": import_cameras, + "import_cameras": result["import_cameras"], } set_container_metadata(project_mesh_object_name, container) diff --git a/client/ayon_core/hosts/traypublisher/addon.py b/client/ayon_core/hosts/traypublisher/addon.py index 70bdfe9a64..3dd275f223 100644 --- a/client/ayon_core/hosts/traypublisher/addon.py +++ b/client/ayon_core/hosts/traypublisher/addon.py @@ -1,5 +1,6 @@ import os +from pathlib import Path from ayon_core.lib import get_ayon_launcher_args from ayon_core.lib.execute import run_detached_process from ayon_core.addon import ( @@ -57,3 +58,62 @@ def launch(): from ayon_core.tools import traypublisher traypublisher.main() + + +@cli_main.command() +@click_wrap.option( + "--filepath", + help="Full path to CSV file with data", + type=str, + required=True +) +@click_wrap.option( + "--project", + help="Project name in which the context will be used", + type=str, + required=True +) +@click_wrap.option( + "--folder-path", + help="Asset name in which the context will be used", + type=str, + required=True +) +@click_wrap.option( + "--task", + help="Task name under Asset in which the context will be used", + type=str, + required=False +) +@click_wrap.option( + "--ignore-validators", + help="Option to ignore validators", + type=bool, + is_flag=True, + required=False +) +def ingestcsv( + filepath, + project, + folder_path, + task, + ignore_validators +): + """Ingest CSV file into project. + + This command will ingest CSV file into project. CSV file must be in + specific format. See documentation for more information. 
+    """
+    from .csv_publish import csvpublish
+
+    # use Path to check if csv_filepath exists
+    if not Path(filepath).exists():
+        raise FileNotFoundError(f"File {filepath} does not exist.")
+
+    csvpublish(
+        filepath,
+        project,
+        folder_path,
+        task,
+        ignore_validators
+    )
diff --git a/client/ayon_core/hosts/traypublisher/csv_publish.py b/client/ayon_core/hosts/traypublisher/csv_publish.py
new file mode 100644
index 0000000000..b43792a357
--- /dev/null
+++ b/client/ayon_core/hosts/traypublisher/csv_publish.py
@@ -0,0 +1,86 @@
+import os
+
+import pyblish.api
+import pyblish.util
+
+from ayon_api import get_folder_by_path, get_task_by_name
+from ayon_core.lib.attribute_definitions import FileDefItem
+from ayon_core.pipeline import install_host
+from ayon_core.pipeline.create import CreateContext
+
+from ayon_core.hosts.traypublisher.api import TrayPublisherHost
+
+
+def csvpublish(
+    filepath,
+    project_name,
+    folder_path,
+    task_name=None,
+    ignore_validators=False
+):
+    """Publish CSV file.
+
+    Args:
+        filepath (str): Path to CSV file.
+        project_name (str): Project name.
+        folder_path (str): Folder path.
+        task_name (Optional[str]): Task name.
+        ignore_validators (Optional[bool]): Option to ignore validators.
+    """
+
+    # initialization of host
+    host = TrayPublisherHost()
+    install_host(host)
+
+    # setting host context into project
+    host.set_project_name(project_name)
+
+    # form precreate data with field values
+    file_field = FileDefItem.from_paths([filepath], False).pop().to_dict()
+    precreate_data = {
+        "csv_filepath_data": file_field,
+    }
+
+    # create context initialization
+    create_context = CreateContext(host, headless=True)
+    folder_entity = get_folder_by_path(
+        project_name,
+        folder_path=folder_path,
+    )
+
+    if not folder_entity:
+        raise ValueError(
+            f"Folder path '{folder_path}' does not "
+            f"exist in project '{project_name}'."
+        )
+
+    task_entity = get_task_by_name(
+        project_name,
+        folder_entity["id"],
+        task_name,
+    )
+
+    if not task_entity:
+        raise ValueError(
+            f"Task name '{task_name}' does not "
+            f"exist in folder '{folder_path}'."
+        )
+
+    create_context.create(
+        "io.ayon.creators.traypublisher.csv_ingest",
+        "Main",
+        folder_entity=folder_entity,
+        task_entity=task_entity,
+        pre_create_data=precreate_data,
+    )
+
+    # publishing context initialization
+    pyblish_context = pyblish.api.Context()
+    pyblish_context.data["create_context"] = create_context
+
+    # redefine targets (skip 'local' to disable validators)
+    targets = None
+    if ignore_validators:
+        targets = ["default", "ingest"]
+    # publishing
+    pyblish.util.publish(context=pyblish_context, targets=targets)
diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py
new file mode 100644
index 0000000000..8143e8b45b
--- /dev/null
+++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py
@@ -0,0 +1,741 @@
+import os
+import re
+import csv
+import clique
+from io import StringIO
+from copy import deepcopy, copy
+
+from ayon_api import get_folder_by_path, get_task_by_name
+from ayon_core.pipeline.create import get_product_name
+from ayon_core.pipeline import CreatedInstance
+from ayon_core.lib import FileDef, BoolDef
+from ayon_core.lib.transcoding import (
+    VIDEO_EXTENSIONS, IMAGE_EXTENSIONS
+)
+from ayon_core.pipeline.create import CreatorError
+from ayon_core.hosts.traypublisher.api.plugin import (
+    TrayPublishCreator
+)
+
+
+class IngestCSV(TrayPublishCreator):
+    """CSV ingest creator class"""
+
+    icon = "fa.file"
+
+    label = "CSV Ingest"
+    product_type = "csv_ingest_file"
+    identifier = "io.ayon.creators.traypublisher.csv_ingest"
+
+    default_variants = ["Main"]
+
+    description = "Ingest products' data from CSV file"
+    detailed_description = """
+Ingest products' data from CSV file following column and representation
+configuration in project settings.
+"""
+
+    # Position in the list of creators.
+    order = 10
+
+    # settings for this creator
+    columns_config = {}
+    representations_config = {}
+
+    def create(self, subset_name, instance_data, pre_create_data):
+        """Create a product from each row found in the CSV.
+
+        Args:
+            subset_name (str): The subset name.
+            instance_data (dict): The instance data.
+            pre_create_data (dict): The pre-create data.
+        """
+
+        csv_filepath_data = pre_create_data.get("csv_filepath_data", {})
+
+        folder = csv_filepath_data.get("directory", "")
+        if not os.path.exists(folder):
+            raise CreatorError(
+                f"Directory '{folder}' does not exist."
+            )
+        filename = csv_filepath_data.get("filenames", [])
+        self._process_csv_file(subset_name, instance_data, folder, filename[0])
+
+    def _process_csv_file(
+            self, subset_name, instance_data, staging_dir, filename):
+        """Process CSV file.
+
+        Args:
+            subset_name (str): The subset name.
+            instance_data (dict): The instance data.
+            staging_dir (str): The staging directory.
+            filename (str): The filename.
+ """ + + # create new instance from the csv file via self function + self._pass_data_to_csv_instance( + instance_data, + staging_dir, + filename + ) + + csv_instance = CreatedInstance( + self.product_type, subset_name, instance_data, self + ) + self._store_new_instance(csv_instance) + + csv_instance["csvFileData"] = { + "filename": filename, + "staging_dir": staging_dir, + } + + # from special function get all data from csv file and convert them + # to new instances + csv_data_for_instances = self._get_data_from_csv( + staging_dir, filename) + + # create instances from csv data via self function + self._create_instances_from_csv_data( + csv_data_for_instances, staging_dir + ) + + def _create_instances_from_csv_data( + self, + csv_data_for_instances, + staging_dir + ): + """Create instances from csv data""" + + for folder_path, prepared_data in csv_data_for_instances.items(): + project_name = self.create_context.get_current_project_name() + products = prepared_data["products"] + + for instance_name, product_data in products.items(): + # get important instance variables + task_name = product_data["task_name"] + task_type = product_data["task_type"] + variant = product_data["variant"] + product_type = product_data["product_type"] + version = product_data["version"] + + # create subset/product name + product_name = get_product_name( + project_name, + task_name, + task_type, + self.host_name, + product_type, + variant + ) + + # make sure frame start/end is inherited from csv columns + # expected frame range data are handles excluded + for _, repre_data in product_data["representations"].items(): # noqa: E501 + frame_start = repre_data["frameStart"] + frame_end = repre_data["frameEnd"] + handle_start = repre_data["handleStart"] + handle_end = repre_data["handleEnd"] + fps = repre_data["fps"] + break + + # try to find any version comment in representation data + version_comment = next( + iter( + repre_data["comment"] + for repre_data in product_data["representations"].values() # noqa: E501 + if repre_data["comment"] + ), + None + ) + + # try to find any slate switch in representation data + slate_exists = any( + repre_data["slate"] + for _, repre_data in product_data["representations"].items() # noqa: E501 + ) + + # get representations from product data + representations = product_data["representations"] + label = f"{folder_path}_{product_name}_v{version:>03}" + + families = ["csv_ingest"] + if slate_exists: + # adding slate to families mainly for loaders to be able + # to filter out slates + families.append("slate") + + # make product data + product_data = { + "name": instance_name, + "folderPath": folder_path, + "families": families, + "label": label, + "task": task_name, + "variant": variant, + "source": "csv", + "frameStart": frame_start, + "frameEnd": frame_end, + "handleStart": handle_start, + "handleEnd": handle_end, + "fps": fps, + "version": version, + "comment": version_comment, + } + + # create new instance + new_instance = CreatedInstance( + product_type, product_name, product_data, self + ) + self._store_new_instance(new_instance) + + if not new_instance.get("prepared_data_for_repres"): + new_instance["prepared_data_for_repres"] = [] + + base_thumbnail_repre_data = { + "name": "thumbnail", + "ext": None, + "files": None, + "stagingDir": None, + "stagingDir_persistent": True, + "tags": ["thumbnail", "delete"], + } + # need to populate all thumbnails for all representations + # so we can check if unique thumbnail per representation + # is needed + thumbnails = [ + 
repre_data["thumbnailPath"] + for repre_data in representations.values() + if repre_data["thumbnailPath"] + ] + multiple_thumbnails = len(set(thumbnails)) > 1 + explicit_output_name = None + thumbnails_processed = False + for filepath, repre_data in representations.items(): + # check if any review derivate tag is present + reviewable = any( + tag for tag in repre_data.get("tags", []) + # tag can be `ftrackreview` or `review` + if "review" in tag + ) + # since we need to populate multiple thumbnails as + # representation with outputName for (Ftrack instance + # integrator) pairing with reviewable video representations + if ( + thumbnails + and multiple_thumbnails + and reviewable + ): + # multiple unique thumbnails per representation needs + # grouping by outputName + # mainly used in Ftrack instance integrator + explicit_output_name = repre_data["representationName"] + relative_thumbnail_path = repre_data["thumbnailPath"] + # representation might not have thumbnail path + # so ignore this one + if not relative_thumbnail_path: + continue + thumb_dir, thumb_file = \ + self._get_refactor_thumbnail_path( + staging_dir, relative_thumbnail_path) + filename, ext = os.path.splitext(thumb_file) + thumbnail_repr_data = deepcopy( + base_thumbnail_repre_data) + thumbnail_repr_data.update({ + "name": "thumbnail_{}".format(filename), + "ext": ext[1:], + "files": thumb_file, + "stagingDir": thumb_dir, + "outputName": explicit_output_name, + }) + new_instance["prepared_data_for_repres"].append({ + "type": "thumbnail", + "colorspace": None, + "representation": thumbnail_repr_data, + }) + # also add thumbnailPath for ayon to integrate + if not new_instance.get("thumbnailPath"): + new_instance["thumbnailPath"] = ( + os.path.join(thumb_dir, thumb_file) + ) + elif ( + thumbnails + and not multiple_thumbnails + and not thumbnails_processed + or not reviewable + ): + """ + For case where we have only one thumbnail + and not reviewable medias. This needs to be processed + only once per instance. 
+ """ + if not thumbnails: + continue + # here we will use only one thumbnail for + # all representations + relative_thumbnail_path = repre_data["thumbnailPath"] + # popping last thumbnail from list since it is only one + # and we do not need to iterate again over it + if not relative_thumbnail_path: + relative_thumbnail_path = thumbnails.pop() + thumb_dir, thumb_file = \ + self._get_refactor_thumbnail_path( + staging_dir, relative_thumbnail_path) + _, ext = os.path.splitext(thumb_file) + thumbnail_repr_data = deepcopy( + base_thumbnail_repre_data) + thumbnail_repr_data.update({ + "ext": ext[1:], + "files": thumb_file, + "stagingDir": thumb_dir + }) + new_instance["prepared_data_for_repres"].append({ + "type": "thumbnail", + "colorspace": None, + "representation": thumbnail_repr_data, + }) + # also add thumbnailPath for ayon to integrate + if not new_instance.get("thumbnailPath"): + new_instance["thumbnailPath"] = ( + os.path.join(thumb_dir, thumb_file) + ) + + thumbnails_processed = True + + # get representation data + representation_data = self._get_representation_data( + filepath, repre_data, staging_dir, + explicit_output_name + ) + + new_instance["prepared_data_for_repres"].append({ + "type": "media", + "colorspace": repre_data["colorspace"], + "representation": representation_data, + }) + + def _get_refactor_thumbnail_path( + self, staging_dir, relative_thumbnail_path): + thumbnail_abs_path = os.path.join( + staging_dir, relative_thumbnail_path) + return os.path.split( + thumbnail_abs_path) + + def _get_representation_data( + self, filepath, repre_data, staging_dir, explicit_output_name=None + ): + """Get representation data + + Args: + filepath (str): Filepath to representation file. + repre_data (dict): Representation data from CSV file. + staging_dir (str): Staging directory. + explicit_output_name (Optional[str]): Explicit output name. + For grouping purposes with reviewable components. + Defaults to None. + """ + + # get extension of file + basename = os.path.basename(filepath) + extension = os.path.splitext(filepath)[-1].lower() + + # validate filepath is having correct extension based on output + repre_name = repre_data["representationName"] + repre_config_data = None + for repre in self.representations_config["representations"]: + if repre["name"] == repre_name: + repre_config_data = repre + break + + if not repre_config_data: + raise CreatorError( + f"Representation '{repre_name}' not found " + "in config representation data." + ) + + validate_extensions = repre_config_data["extensions"] + if extension not in validate_extensions: + raise CreatorError( + f"File extension '{extension}' not valid for " + f"output '{validate_extensions}'." + ) + + is_sequence = (extension in IMAGE_EXTENSIONS) + # convert ### string in file name to %03d + # this is for correct frame range validation + # example: file.###.exr -> file.%03d.exr + if "#" in basename: + padding = len(basename.split("#")) - 1 + basename = basename.replace("#" * padding, f"%0{padding}d") + is_sequence = True + + # make absolute path to file + absfilepath = os.path.normpath(os.path.join(staging_dir, filepath)) + dirname = os.path.dirname(absfilepath) + + # check if dirname exists + if not os.path.isdir(dirname): + raise CreatorError( + f"Directory '{dirname}' does not exist." 
+ ) + + # collect all data from dirname + paths_for_collection = [] + for file in os.listdir(dirname): + filepath = os.path.join(dirname, file) + paths_for_collection.append(filepath) + + collections, _ = clique.assemble(paths_for_collection) + + if collections: + collections = collections[0] + else: + if is_sequence: + raise CreatorError( + f"No collections found in directory '{dirname}'." + ) + + frame_start = None + frame_end = None + if is_sequence: + files = [os.path.basename(file) for file in collections] + frame_start = list(collections.indexes)[0] + frame_end = list(collections.indexes)[-1] + else: + files = basename + + tags = deepcopy(repre_data["tags"]) + # if slate in repre_data is True then remove one frame from start + if repre_data["slate"]: + tags.append("has_slate") + + # get representation data + representation_data = { + "name": repre_name, + "ext": extension[1:], + "files": files, + "stagingDir": dirname, + "stagingDir_persistent": True, + "tags": tags, + } + if extension in VIDEO_EXTENSIONS: + representation_data.update({ + "fps": repre_data["fps"], + "outputName": repre_name, + }) + + if explicit_output_name: + representation_data["outputName"] = explicit_output_name + + if frame_start: + representation_data["frameStart"] = frame_start + if frame_end: + representation_data["frameEnd"] = frame_end + + return representation_data + + def _get_data_from_csv( + self, package_dir, filename + ): + """Generate instances from the csv file""" + # get current project name and code from context.data + project_name = self.create_context.get_current_project_name() + + csv_file_path = os.path.join( + package_dir, filename + ) + + # make sure csv file contains columns from following list + required_columns = [ + column["name"] for column in self.columns_config["columns"] + if column["required_column"] + ] + + # read csv file + with open(csv_file_path, "r") as csv_file: + csv_content = csv_file.read() + + # read csv file with DictReader + csv_reader = csv.DictReader( + StringIO(csv_content), + delimiter=self.columns_config["csv_delimiter"] + ) + + # fix fieldnames + # sometimes someone can keep extra space at the start or end of + # the column name + all_columns = [ + " ".join(column.rsplit()) for column in csv_reader.fieldnames] + + # return back fixed fieldnames + csv_reader.fieldnames = all_columns + + # check if csv file contains all required columns + if any(column not in all_columns for column in required_columns): + raise CreatorError( + f"Missing required columns: {required_columns}" + ) + + csv_data = {} + # get data from csv file + for row in csv_reader: + # Get required columns first + # TODO: will need to be folder path in CSV + # TODO: `context_asset_name` is now `folder_path` + folder_path = self._get_row_value_with_validation( + "Folder Path", row) + task_name = self._get_row_value_with_validation( + "Task Name", row) + version = self._get_row_value_with_validation( + "Version", row) + + # Get optional columns + variant = self._get_row_value_with_validation( + "Variant", row) + product_type = self._get_row_value_with_validation( + "Product Type", row) + + pre_product_name = ( + f"{task_name}{variant}{product_type}" + f"{version}".replace(" ", "").lower() + ) + + # get representation data + filename, representation_data = \ + self._get_representation_row_data(row) + + # TODO: batch query of all folder paths and task names + + # get folder entity from folder path + folder_entity = get_folder_by_path( + project_name, folder_path) + + # make sure asset exists + if not 
folder_entity: + raise CreatorError( + f"Asset '{folder_path}' not found." + ) + + # first get all tasks on the folder entity and then find + task_entity = get_task_by_name( + project_name, folder_entity["id"], task_name) + + # check if task name is valid task in asset doc + if not task_entity: + raise CreatorError( + f"Task '{task_name}' not found in asset doc." + ) + + # get all csv data into one dict and make sure there are no + # duplicates data are already validated and sorted under + # correct existing asset also check if asset exists and if + # task name is valid task in asset doc and representations + # are distributed under products following variants + if folder_path not in csv_data: + csv_data[folder_path] = { + "folder_entity": folder_entity, + "products": { + pre_product_name: { + "task_name": task_name, + "task_type": task_entity["taskType"], + "variant": variant, + "product_type": product_type, + "version": version, + "representations": { + filename: representation_data, + }, + } + } + } + else: + csv_products = csv_data[folder_path]["products"] + if pre_product_name not in csv_products: + csv_products[pre_product_name] = { + "task_name": task_name, + "task_type": task_entity["taskType"], + "variant": variant, + "product_type": product_type, + "version": version, + "representations": { + filename: representation_data, + }, + } + else: + csv_representations = \ + csv_products[pre_product_name]["representations"] + if filename in csv_representations: + raise CreatorError( + f"Duplicate filename '{filename}' in csv file." + ) + csv_representations[filename] = representation_data + + return csv_data + + def _get_representation_row_data(self, row_data): + """Get representation row data""" + # Get required columns first + file_path = self._get_row_value_with_validation( + "File Path", row_data) + frame_start = self._get_row_value_with_validation( + "Frame Start", row_data) + frame_end = self._get_row_value_with_validation( + "Frame End", row_data) + handle_start = self._get_row_value_with_validation( + "Handle Start", row_data) + handle_end = self._get_row_value_with_validation( + "Handle End", row_data) + fps = self._get_row_value_with_validation( + "FPS", row_data) + + # Get optional columns + thumbnail_path = self._get_row_value_with_validation( + "Version Thumbnail", row_data) + colorspace = self._get_row_value_with_validation( + "Representation Colorspace", row_data) + comment = self._get_row_value_with_validation( + "Version Comment", row_data) + repre = self._get_row_value_with_validation( + "Representation", row_data) + slate_exists = self._get_row_value_with_validation( + "Slate Exists", row_data) + repre_tags = self._get_row_value_with_validation( + "Representation Tags", row_data) + + # convert tags value to list + tags_list = copy(self.representations_config["default_tags"]) + if repre_tags: + tags_list = [] + tags_delimiter = self.representations_config["tags_delimiter"] + # strip spaces from repre_tags + if tags_delimiter in repre_tags: + tags = repre_tags.split(tags_delimiter) + for _tag in tags: + tags_list.append(("".join(_tag.strip())).lower()) + else: + tags_list.append(repre_tags) + + representation_data = { + "colorspace": colorspace, + "comment": comment, + "representationName": repre, + "slate": slate_exists, + "tags": tags_list, + "thumbnailPath": thumbnail_path, + "frameStart": int(frame_start), + "frameEnd": int(frame_end), + "handleStart": int(handle_start), + "handleEnd": int(handle_end), + "fps": float(fps), + } + return file_path, representation_data 
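Illustration only, not part of the patch: a minimal sketch of what the `_get_representation_row_data` helper above would produce for a hypothetical CSV row. The column names are the ones queried by the helper; the paths, frame numbers and the assumption that the default columns/representations config accepts these values (bool type for "Slate Exists", tags delimiter not present in the single tag) are made up for the example.

# Hypothetical CSV row as returned by csv.DictReader (example values only)
row = {
    "Folder Path": "/shots/sh010",
    "Task Name": "compositing",
    "Version": "3",
    "Variant": "Main",
    "Product Type": "render",
    "File Path": "renders/sh010_comp.####.exr",
    "Frame Start": "1001",
    "Frame End": "1096",
    "Handle Start": "0",
    "Handle End": "0",
    "FPS": "25",
    "Representation": "exr",
    "Slate Exists": "False",
    "Representation Tags": "review",
}

# _get_representation_row_data(row) would then return roughly:
# (
#     "renders/sh010_comp.####.exr",
#     {
#         "colorspace": None,
#         "comment": None,
#         "representationName": "exr",
#         "slate": False,
#         "tags": ["review"],
#         "thumbnailPath": None,
#         "frameStart": 1001,
#         "frameEnd": 1096,
#         "handleStart": 0,
#         "handleEnd": 0,
#         "fps": 25.0,
#     },
# )

The returned pair is what `_get_data_from_csv` stores under `csv_data[folder_path]["products"][...]["representations"]`.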
+ + def _get_row_value_with_validation( + self, column_name, row_data, default_value=None + ): + """Get row value with validation""" + + # get column data from column config + column_data = None + for column in self.columns_config["columns"]: + if column["name"] == column_name: + column_data = column + break + + if not column_data: + raise CreatorError( + f"Column '{column_name}' not found in column config." + ) + + # get column value from row + column_value = row_data.get(column_name) + column_required = column_data["required_column"] + + # check if column value is not empty string and column is required + if column_value == "" and column_required: + raise CreatorError( + f"Value in column '{column_name}' is required." + ) + + # get column type + column_type = column_data["type"] + # get column validation regex + column_validation = column_data["validation_pattern"] + # get column default value + column_default = default_value or column_data["default"] + + if column_type in ["number", "decimal"] and column_default == 0: + column_default = None + + # check if column value is not empty string + if column_value == "": + # set default value if column value is empty string + column_value = column_default + + # set column value to correct type following column type + if column_type == "number" and column_value is not None: + column_value = int(column_value) + elif column_type == "decimal" and column_value is not None: + column_value = float(column_value) + elif column_type == "bool": + column_value = column_value in ["true", "True"] + + # check if column value matches validation regex + if ( + column_value is not None and + not re.match(str(column_validation), str(column_value)) + ): + raise CreatorError( + f"Column '{column_name}' value '{column_value}' " + f"does not match validation regex '{column_validation}' \n" + f"Row data: {row_data} \n" + f"Column data: {column_data}" + ) + + return column_value + + def _pass_data_to_csv_instance( + self, instance_data, staging_dir, filename + ): + """Pass CSV representation file to instance data""" + + representation = { + "name": "csv", + "ext": "csv", + "files": filename, + "stagingDir": staging_dir, + "stagingDir_persistent": True, + } + + instance_data.update({ + "label": f"CSV: {filename}", + "representations": [representation], + "stagingDir": staging_dir, + "stagingDir_persistent": True, + }) + + def get_instance_attr_defs(self): + return [ + BoolDef( + "add_review_family", + default=True, + label="Review" + ) + ] + + def get_pre_create_attr_defs(self): + """Creating pre-create attributes at creator plugin. + + Returns: + list: list of attribute object instances + """ + # Use same attributes as for instance attributes + attr_defs = [ + FileDef( + "csv_filepath_data", + folders=False, + extensions=[".csv"], + allow_sequences=False, + single_item=True, + label="CSV File", + ), + ] + return attr_defs diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py index 843729786c..4057aee9a6 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py @@ -402,7 +402,7 @@ or updating already created. Publishing will create OTIO file. 
): continue - instance = self._make_product_instance( + self._make_product_instance( otio_clip, product_type_preset, deepcopy(base_instance_data), diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py new file mode 100644 index 0000000000..33536d0854 --- /dev/null +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py @@ -0,0 +1,46 @@ +from pprint import pformat +import pyblish.api +from ayon_core.pipeline import publish + + +class CollectCSVIngestInstancesData( + pyblish.api.InstancePlugin, + publish.AYONPyblishPluginMixin, + publish.ColormanagedPyblishPluginMixin +): + """Collect CSV Ingest data from instance. + """ + + label = "Collect CSV Ingest instances data" + order = pyblish.api.CollectorOrder + 0.1 + hosts = ["traypublisher"] + families = ["csv_ingest"] + + def process(self, instance): + + # expecting list of dicts: {"type", "colorspace", "representation"} + prepared_repres_data_items = instance.data[ + "prepared_data_for_repres"] + + for prep_repre_data in prepared_repres_data_items: + type = prep_repre_data["type"] + colorspace = prep_repre_data["colorspace"] + repre_data = prep_repre_data["representation"] + + # colorspace is set only for media representations + if type == "media" and colorspace is not None: + # colorspace name is passed from CSV column + self.set_representation_colorspace( + repre_data, instance.context, colorspace + ) + elif type == "media": + # TODO: implement colorspace file rules file parsing + self.log.warning( + "Colorspace is not defined in csv for following" + f" representation: {pformat(repre_data)}" + ) + elif type == "thumbnail": + # thumbnails do not need colorspace data + pass + + instance.data["representations"].append(repre_data) diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/extract_csv_file.py b/client/ayon_core/hosts/traypublisher/plugins/publish/extract_csv_file.py new file mode 100644 index 0000000000..4bdf7c0493 --- /dev/null +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/extract_csv_file.py @@ -0,0 +1,31 @@ +import pyblish.api + +from ayon_core.pipeline import publish + + +class ExtractCSVFile(publish.Extractor): + """ + Extractor registering the source CSV file as a representation + """ + + label = "Extract CSV file" + order = pyblish.api.ExtractorOrder - 0.45 + families = ["csv_ingest_file"] + hosts = ["traypublisher"] + + def process(self, instance): + + csv_file_data = instance.data["csvFileData"] + + representation_csv = { + 'name': "csv_data", + 'ext': "csv", + 'files': csv_file_data["filename"], + "stagingDir": csv_file_data["staging_dir"], + "stagingDir_persistent": True + } + + instance.data["representations"].append(representation_csv) + + self.log.info("Added CSV file representation: {}".format( + representation_csv)) diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py index 3a62536507..0b4f8e16c1 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py @@ -16,6 +16,7 @@ class ValidateExistingVersion( order = ValidateContentsOrder hosts = ["traypublisher"] + targets = ["local"] actions = [RepairAction] diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py
b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py index 4f11571efe..13f13b05bb 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py @@ -16,6 +16,8 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, label = "Validate Frame Range" hosts = ["traypublisher"] families = ["render", "plate"] + targets = ["local"] + order = ValidateContentsOrder optional = True diff --git a/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py b/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py index 407671e14d..07c2d91533 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py +++ b/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py @@ -1,6 +1,5 @@ import os -from ayon_core.lib import StringTemplate from ayon_core.pipeline import ( registered_host, get_current_context, @@ -111,8 +110,6 @@ class LoadWorkfile(plugin.Loader): data["version"] = version - filename = StringTemplate.format_strict_template( - file_template, data - ) + filename = work_template["file"].format_strict(data) path = os.path.join(work_root, filename) host.save_workfile(path) diff --git a/client/ayon_core/hosts/unreal/api/__init__.py b/client/ayon_core/hosts/unreal/api/__init__.py index ac6a91eae9..7e7f839f27 100644 --- a/client/ayon_core/hosts/unreal/api/__init__.py +++ b/client/ayon_core/hosts/unreal/api/__init__.py @@ -28,9 +28,11 @@ from .pipeline import ( ) __all__ = [ + "UnrealActorCreator", + "UnrealAssetCreator", + "Loader", "install", "uninstall", - "Loader", "ls", "publish", "containerise", diff --git a/client/ayon_core/lib/plugin_tools.py b/client/ayon_core/lib/plugin_tools.py index 5ad4da88b9..654bc7ac4a 100644 --- a/client/ayon_core/lib/plugin_tools.py +++ b/client/ayon_core/lib/plugin_tools.py @@ -94,8 +94,12 @@ def prepare_template_data(fill_pairs): output = {} for item in valid_items: keys, value = item - upper_value = value.upper() - capitalized_value = _capitalize_value(value) + # Convert only string values + if isinstance(value, str): + upper_value = value.upper() + capitalized_value = _capitalize_value(value) + else: + upper_value = capitalized_value = value first_key = keys.pop(0) if not keys: diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py index bfb65708e6..e3a4cd8030 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py @@ -103,17 +103,17 @@ class FusionSubmitDeadline( # Collect all saver instances in context that are to be rendered saver_instances = [] - for instance in context: - if instance.data["productType"] != "render": + for inst in context: + if inst.data["productType"] != "render": # Allow only saver family instances continue - if not instance.data.get("publish", True): + if not inst.data.get("publish", True): # Skip inactive instances continue - self.log.debug(instance.data["name"]) - saver_instances.append(instance) + self.log.debug(inst.data["name"]) + saver_instances.append(inst) if not saver_instances: raise RuntimeError("No instances found for Deadline submission") diff --git a/client/ayon_core/modules/loader_action.py b/client/ayon_core/modules/loader_action.py index a0cc417b66..1e45db05dc 100644 --- a/client/ayon_core/modules/loader_action.py +++ 
b/client/ayon_core/modules/loader_action.py @@ -13,7 +13,7 @@ class LoaderAddon(AYONAddon, ITrayAddon): # Add library tool self._loader_imported = False try: - from ayon_core.tools.loader.ui import LoaderWindow + from ayon_core.tools.loader.ui import LoaderWindow # noqa: F401 self._loader_imported = True except Exception: diff --git a/client/ayon_core/modules/royalrender/api.py b/client/ayon_core/modules/royalrender/api.py index cd72014a42..a69f88c43c 100644 --- a/client/ayon_core/modules/royalrender/api.py +++ b/client/ayon_core/modules/royalrender/api.py @@ -1,13 +1,14 @@ # -*- coding: utf-8 -*- """Wrapper around Royal Render API.""" -import sys import os +import sys -from ayon_core.lib.local_settings import AYONSettingsRegistry -from ayon_core.lib import Logger, run_subprocess -from .rr_job import RRJob, SubmitFile, SubmitterParameter +from ayon_core.lib import Logger, run_subprocess, AYONSettingsRegistry from ayon_core.lib.vendor_bin_utils import find_tool_in_custom_paths +from .rr_job import SubmitFile +from .rr_job import RRJob, SubmitterParameter # noqa: F401 + class Api: diff --git a/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py b/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py index f3287b7638..51500f84f5 100644 --- a/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py +++ b/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py @@ -3,7 +3,6 @@ import os import attr import json -import re import pyblish.api diff --git a/client/ayon_core/pipeline/anatomy/anatomy.py b/client/ayon_core/pipeline/anatomy/anatomy.py index 73dd215233..2aa8eeddbc 100644 --- a/client/ayon_core/pipeline/anatomy/anatomy.py +++ b/client/ayon_core/pipeline/anatomy/anatomy.py @@ -549,7 +549,7 @@ class Anatomy(BaseAnatomy): ) else: # Ask sync server to get roots overrides - roots_overrides = sitesync.get_site_root_overrides( + roots_overrides = sitesync_addon.get_site_root_overrides( project_name, site_name ) site_cache.update_data(roots_overrides) diff --git a/client/ayon_core/pipeline/anatomy/templates.py b/client/ayon_core/pipeline/anatomy/templates.py index 46cad385f0..d89b70719e 100644 --- a/client/ayon_core/pipeline/anatomy/templates.py +++ b/client/ayon_core/pipeline/anatomy/templates.py @@ -14,7 +14,6 @@ from .exceptions import ( TemplateMissingKey, AnatomyTemplateUnsolved, ) -from .roots import RootItem _PLACEHOLDER = object() diff --git a/client/ayon_core/pipeline/context_tools.py b/client/ayon_core/pipeline/context_tools.py index e9151bcd1f..33567d7280 100644 --- a/client/ayon_core/pipeline/context_tools.py +++ b/client/ayon_core/pipeline/context_tools.py @@ -1,7 +1,6 @@ """Core pipeline functionality""" import os -import types import logging import platform import uuid @@ -21,7 +20,6 @@ from .anatomy import Anatomy from .template_data import get_template_data_with_names from .workfile import ( get_workdir, - get_workfile_template_key, get_custom_workfile_template_by_string_context, ) from . import ( diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index ca9896fb3f..b8618738fb 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -1790,10 +1790,10 @@ class CreateContext: creator_identifier = creator_class.identifier if creator_identifier in creators: - self.log.warning(( "Duplicated Creator identifier. 
" - "Using first and skipping following" - )) + self.log.warning( + "Duplicate Creator identifier: '%s'. Using first Creator " + "and skipping: %s", creator_identifier, creator_class + ) continue # Filter by host name diff --git a/client/ayon_core/pipeline/farm/pyblish_functions.py b/client/ayon_core/pipeline/farm/pyblish_functions.py index dadf2cbe1a..eb6f8569d9 100644 --- a/client/ayon_core/pipeline/farm/pyblish_functions.py +++ b/client/ayon_core/pipeline/farm/pyblish_functions.py @@ -6,13 +6,11 @@ from copy import deepcopy import attr import ayon_api -import pyblish.api import clique from ayon_core.pipeline import ( get_current_project_name, get_representation_path, - Anatomy, ) from ayon_core.lib import Logger from ayon_core.pipeline.publish import KnownPublishError @@ -137,7 +135,7 @@ def get_transferable_representations(instance): list of dicts: List of transferable representations. """ - anatomy = instance.context.data["anatomy"] # type: Anatomy + anatomy = instance.context.data["anatomy"] to_transfer = [] for representation in instance.data.get("representations", []): @@ -166,7 +164,6 @@ def get_transferable_representations(instance): def create_skeleton_instance( instance, families_transfer=None, instance_transfer=None): - # type: (pyblish.api.Instance, list, dict) -> dict """Create skeleton instance from original instance data. This will create dictionary containing skeleton @@ -191,7 +188,7 @@ def create_skeleton_instance( context = instance.context data = instance.data.copy() - anatomy = instance.context.data["anatomy"] # type: Anatomy + anatomy = instance.context.data["anatomy"] # get time related data from instance (or context) time_data = get_time_data_from_instance_or_context(instance) @@ -620,15 +617,32 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data, aov_patterns = aov_filter preview = match_aov_pattern(app, aov_patterns, render_file_name) - # toggle preview on if multipart is on - if instance.data.get("multipartExr"): - log.debug("Adding preview tag because its multipartExr") - preview = True new_instance = deepcopy(skeleton) new_instance["productName"] = product_name new_instance["productGroup"] = group_name + # toggle preview on if multipart is on + # Because we cant query the multipartExr data member of each AOV we'll + # need to have hardcoded rule of excluding any renders with + # "cryptomatte" in the file name from being a multipart EXR. This issue + # happens with Redshift that forces Cryptomatte renders to be separate + # files even when the rest of the AOVs are merged into a single EXR. + # There might be an edge case where the main instance has cryptomatte + # in the name even though it's a multipart EXR. + if instance.data.get("renderer") == "redshift": + if ( + instance.data.get("multipartExr") and + "cryptomatte" not in render_file_name.lower() + ): + log.debug("Adding preview tag because it's multipartExr") + preview = True + else: + new_instance["multipartExr"] = False + elif instance.data.get("multipartExr"): + log.debug("Adding preview tag because its multipartExr") + preview = True + # explicitly disable review by user preview = preview and not do_not_add_review if preview: @@ -751,7 +765,6 @@ def get_resources(project_name, version_entity, extension=None): def create_skeleton_instance_cache(instance): - # type: (pyblish.api.Instance, list, dict) -> dict """Create skeleton instance from original instance data. 
This will create dictionary containing skeleton @@ -771,7 +784,7 @@ def create_skeleton_instance_cache(instance): context = instance.context data = instance.data.copy() - anatomy = instance.context.data["anatomy"] # type: Anatomy + anatomy = instance.context.data["anatomy"] # get time related data from instance (or context) time_data = get_time_data_from_instance_or_context(instance) @@ -1005,7 +1018,7 @@ def copy_extend_frames(instance, representation): start = instance.data.get("frameStart") end = instance.data.get("frameEnd") project_name = instance.context.data["project"] - anatomy = instance.context.data["anatomy"] # type: Anatomy + anatomy = instance.context.data["anatomy"] folder_entity = ayon_api.get_folder_by_path( project_name, instance.data.get("folderPath") diff --git a/client/ayon_core/pipeline/publish/abstract_collect_render.py b/client/ayon_core/pipeline/publish/abstract_collect_render.py index 745632ca0a..c50dc16380 100644 --- a/client/ayon_core/pipeline/publish/abstract_collect_render.py +++ b/client/ayon_core/pipeline/publish/abstract_collect_render.py @@ -81,6 +81,9 @@ class RenderInstance(object): outputDir = attr.ib(default=None) context = attr.ib(default=None) + # The source instance the data of this render instance should merge into + source_instance = attr.ib(default=None, type=pyblish.api.Instance) + @frameStart.validator def check_frame_start(self, _, value): """Validate if frame start is not larger then end.""" @@ -214,8 +217,11 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): data = self.add_additional_data(data) render_instance_dict = attr.asdict(render_instance) - instance = context.create_instance(render_instance.name) - instance.data["label"] = render_instance.label + # Merge into source instance if provided, otherwise create instance + instance = render_instance_dict.pop("source_instance", None) + if instance is None: + instance = context.create_instance(render_instance.name) + instance.data.update(render_instance_dict) instance.data.update(data) diff --git a/client/ayon_core/pipeline/schema/__init__.py b/client/ayon_core/pipeline/schema/__init__.py index 67cf120b59..db98a6d080 100644 --- a/client/ayon_core/pipeline/schema/__init__.py +++ b/client/ayon_core/pipeline/schema/__init__.py @@ -13,7 +13,6 @@ Resources: """ import os -import re import json import logging diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 5e63ba444a..3447520b39 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -329,7 +329,7 @@ class AbstractTemplateBuilder(object): is good practice to check if the same value is not already stored under different key or if the key is not already used for something else. - Key should be self explanatory to content. + Key should be self-explanatory to content. - wrong: 'folder' - good: 'folder_name' @@ -375,7 +375,7 @@ class AbstractTemplateBuilder(object): is good practice to check if the same value is not already stored under different key or if the key is not already used for something else. - Key should be self explanatory to content. + Key should be self-explanatory to content. - wrong: 'folder' - good: 'folder_path' @@ -395,7 +395,7 @@ class AbstractTemplateBuilder(object): is good practice to check if the same value is not already stored under different key or if the key is not already used for something else. 
- Key should be self explanatory to content. + Key should be self-explanatory to content. - wrong: 'folder' - good: 'folder_path' @@ -466,7 +466,7 @@ class AbstractTemplateBuilder(object): return list(sorted( placeholders, - key=lambda i: i.order + key=lambda placeholder: placeholder.order )) def build_template( @@ -498,15 +498,21 @@ class AbstractTemplateBuilder(object): process if version is created """ - template_preset = self.get_template_preset() - - if template_path is None: - template_path = template_preset["path"] - - if keep_placeholders is None: - keep_placeholders = template_preset["keep_placeholder"] - if create_first_version is None: - create_first_version = template_preset["create_first_version"] + if any( + value is None + for value in [ + template_path, + keep_placeholders, + create_first_version, + ] + ): + template_preset = self.get_template_preset() + if template_path is None: + template_path = template_preset["path"] + if keep_placeholders is None: + keep_placeholders = template_preset["keep_placeholder"] + if create_first_version is None: + create_first_version = template_preset["create_first_version"] # check if first version is created created_version_workfile = False @@ -685,7 +691,7 @@ class AbstractTemplateBuilder(object): for placeholder in placeholders } all_processed = len(placeholders) == 0 - # Counter is checked at the ned of a loop so the loop happens at least + # Counter is checked at the end of a loop so the loop happens at least # once. iter_counter = 0 while not all_processed: @@ -772,12 +778,14 @@ class AbstractTemplateBuilder(object): - 'project_settings/{host name}/templated_workfile_build/profiles' Returns: - str: Path to a template file with placeholders. + dict: Dictionary with `path`, `keep_placeholder` and + `create_first_version` settings from the template preset + for current context. Raises: TemplateProfileNotFound: When profiles are not filled. TemplateLoadFailed: Profile was found but path is not set. - TemplateNotFound: Path was set but file does not exists. + TemplateNotFound: Path was set but file does not exist. """ host_name = self.host_name @@ -1045,7 +1053,7 @@ class PlaceholderPlugin(object): Using shared data from builder but stored under plugin identifier. - Key should be self explanatory to content. + Key should be self-explanatory to content. - wrong: 'folder' - good: 'folder_path' @@ -1085,7 +1093,7 @@ class PlaceholderPlugin(object): Using shared data from builder but stored under plugin identifier. - Key should be self explanatory to content. + Key should be self-explanatory to content. - wrong: 'folder' - good: 'folder_path' @@ -1107,10 +1115,10 @@ class PlaceholderItem(object): """Item representing single item in scene that is a placeholder to process. Items are always created and updated by their plugins. Each plugin can use - modified class of 'PlacehoderItem' but only to add more options instead of + modified class of 'PlaceholderItem' but only to add more options instead of new other. - Scene identifier is used to avoid processing of the palceholder item + Scene identifier is used to avoid processing of the placeholder item multiple times so must be unique across whole workfile builder. Args: @@ -1162,7 +1170,7 @@ class PlaceholderItem(object): """Placeholder data which can modify how placeholder is processed. Possible general keys - - order: Can define the order in which is palceholder processed. + - order: Can define the order in which is placeholder processed. Lower == earlier. 
Other keys are defined by placeholder and should validate them on item @@ -1264,11 +1272,9 @@ class PlaceholderLoadMixin(object): """Unified attribute definitions for load placeholder. Common function for placeholder plugins used for loading of - repsentations. Use it in 'get_placeholder_options'. + representations. Use it in 'get_placeholder_options'. Args: - plugin (PlaceholderPlugin): Plugin used for loading of - representations. options (Dict[str, Any]): Already available options which are used as defaults for attributes. @@ -1468,7 +1474,9 @@ class PlaceholderLoadMixin(object): product_name_regex = None if product_name_regex_value: product_name_regex = re.compile(product_name_regex_value) - product_type = placeholder.data["family"] + product_type = placeholder.data.get("product_type") + if product_type is None: + product_type = placeholder.data["family"] builder_type = placeholder.data["builder_type"] folder_ids = [] @@ -1529,35 +1537,22 @@ class PlaceholderLoadMixin(object): pass - def _reduce_last_version_repre_entities(self, representations): - """Reduce representations to last verison.""" + def _reduce_last_version_repre_entities(self, repre_contexts): + """Reduce representations to last version.""" - mapping = {} - # TODO use representation context with entities - # - using 'folder', 'subset' and 'version' from context on - # representation is danger - for repre_entity in representations: - repre_context = repre_entity["context"] - - folder_name = repre_context["asset"] - product_name = repre_context["subset"] - version = repre_context.get("version", -1) - - if folder_name not in mapping: - mapping[folder_name] = {} - - product_mapping = mapping[folder_name] - if product_name not in product_mapping: - product_mapping[product_name] = collections.defaultdict(list) - - version_mapping = product_mapping[product_name] - version_mapping[version].append(repre_entity) + version_mapping_by_product_id = {} + for repre_context in repre_contexts: + product_id = repre_context["product"]["id"] + version = repre_context["version"]["version"] + version_mapping = version_mapping_by_product_id.setdefault( + product_id, {} + ) + version_mapping.setdefault(version, []).append(repre_context) output = [] - for product_mapping in mapping.values(): - for version_mapping in product_mapping.values(): - last_version = tuple(sorted(version_mapping.keys()))[-1] - output.extend(version_mapping[last_version]) + for version_mapping in version_mapping_by_product_id.values(): + last_version = max(version_mapping.keys()) + output.extend(version_mapping[last_version]) return output def populate_load_placeholder(self, placeholder, ignore_repre_ids=None): @@ -1585,32 +1580,33 @@ class PlaceholderLoadMixin(object): loader_name = placeholder.data["loader"] loader_args = self.parse_loader_args(placeholder.data["loader_args"]) - placeholder_representations = self._get_representations(placeholder) + placeholder_representations = [ + repre_entity + for repre_entity in self._get_representations(placeholder) + if repre_entity["id"] not in ignore_repre_ids + ] - filtered_representations = [] - for representation in self._reduce_last_version_repre_entities( - placeholder_representations - ): - repre_id = representation["id"] - if repre_id not in ignore_repre_ids: - filtered_representations.append(representation) - - if not filtered_representations: + repre_load_contexts = get_representation_contexts( + self.project_name, placeholder_representations + ) + filtered_repre_contexts = self._reduce_last_version_repre_entities( + 
repre_load_contexts.values() + ) + if not filtered_repre_contexts: self.log.info(( "There's no representation for this placeholder: {}" ).format(placeholder.scene_identifier)) + if not placeholder.data.get("keep_placeholder", True): + self.delete_placeholder(placeholder) return - repre_load_contexts = get_representation_contexts( - self.project_name, filtered_representations - ) loaders_by_name = self.builder.get_loaders_by_name() self._before_placeholder_load( placeholder ) failed = False - for repre_load_context in repre_load_contexts.values(): + for repre_load_context in filtered_repre_contexts: folder_path = repre_load_context["folder"]["path"] product_name = repre_load_context["product"]["name"] representation = repre_load_context["representation"] @@ -1695,8 +1691,6 @@ class PlaceholderCreateMixin(object): publishable instances. Use it with 'get_placeholder_options'. Args: - plugin (PlaceholderPlugin): Plugin used for creating of - publish instances. options (Dict[str, Any]): Already available options which are used as defaults for attributes. diff --git a/client/ayon_core/plugins/actions/open_file_explorer.py b/client/ayon_core/plugins/actions/open_file_explorer.py index 6a456c75c1..50a3107444 100644 --- a/client/ayon_core/plugins/actions/open_file_explorer.py +++ b/client/ayon_core/plugins/actions/open_file_explorer.py @@ -3,8 +3,6 @@ import platform import subprocess from string import Formatter -import ayon_api - from ayon_core.pipeline import ( Anatomy, LauncherAction, diff --git a/client/ayon_core/plugins/load/delete_old_versions.py b/client/ayon_core/plugins/load/delete_old_versions.py index 8e04fd9827..62302e7123 100644 --- a/client/ayon_core/plugins/load/delete_old_versions.py +++ b/client/ayon_core/plugins/load/delete_old_versions.py @@ -1,501 +1,426 @@ -# TODO This plugin is not converted for AYON -# -# import collections -# import os -# import uuid -# -# import clique -# import ayon_api -# from pymongo import UpdateOne -# import qargparse -# from qtpy import QtWidgets, QtCore -# -# from ayon_core import style -# from ayon_core.addon import AddonsManager -# from ayon_core.lib import format_file_size -# from ayon_core.pipeline import load, Anatomy -# from ayon_core.pipeline.load import ( -# get_representation_path_with_anatomy, -# InvalidRepresentationContext, -# ) -# -# -# class DeleteOldVersions(load.ProductLoaderPlugin): -# """Deletes specific number of old version""" -# -# is_multiple_contexts_compatible = True -# sequence_splitter = "__sequence_splitter__" -# -# representations = {"*"} -# product_types = {"*"} -# tool_names = ["library_loader"] -# -# label = "Delete Old Versions" -# order = 35 -# icon = "trash" -# color = "#d8d8d8" -# -# options = [ -# qargparse.Integer( -# "versions_to_keep", default=2, min=0, help="Versions to keep:" -# ), -# qargparse.Boolean( -# "remove_publish_folder", help="Remove publish folder:" -# ) -# ] -# -# def delete_whole_dir_paths(self, dir_paths, delete=True): -# size = 0 -# -# for dir_path in dir_paths: -# # Delete all files and fodlers in dir path -# for root, dirs, files in os.walk(dir_path, topdown=False): -# for name in files: -# file_path = os.path.join(root, name) -# size += os.path.getsize(file_path) -# if delete: -# os.remove(file_path) -# self.log.debug("Removed file: {}".format(file_path)) -# -# for name in dirs: -# if delete: -# os.rmdir(os.path.join(root, name)) -# -# if not delete: -# continue -# -# # Delete even the folder and it's parents folders if they are empty -# while True: -# if not os.path.exists(dir_path): -# 
dir_path = os.path.dirname(dir_path) -# continue -# -# if len(os.listdir(dir_path)) != 0: -# break -# -# os.rmdir(os.path.join(dir_path)) -# -# return size -# -# def path_from_representation(self, representation, anatomy): -# try: -# context = representation["context"] -# except KeyError: -# return (None, None) -# -# try: -# path = get_representation_path_with_anatomy( -# representation, anatomy -# ) -# except InvalidRepresentationContext: -# return (None, None) -# -# sequence_path = None -# if "frame" in context: -# context["frame"] = self.sequence_splitter -# sequence_path = get_representation_path_with_anatomy( -# representation, anatomy -# ) -# -# if sequence_path: -# sequence_path = sequence_path.normalized() -# -# return (path.normalized(), sequence_path) -# -# def delete_only_repre_files(self, dir_paths, file_paths, delete=True): -# size = 0 -# -# for dir_id, dir_path in dir_paths.items(): -# dir_files = os.listdir(dir_path) -# collections, remainders = clique.assemble(dir_files) -# for file_path, seq_path in file_paths[dir_id]: -# file_path_base = os.path.split(file_path)[1] -# # Just remove file if `frame` key was not in context or -# # filled path is in remainders (single file sequence) -# if not seq_path or file_path_base in remainders: -# if not os.path.exists(file_path): -# self.log.debug( -# "File was not found: {}".format(file_path) -# ) -# continue -# -# size += os.path.getsize(file_path) -# -# if delete: -# os.remove(file_path) -# self.log.debug("Removed file: {}".format(file_path)) -# -# if file_path_base in remainders: -# remainders.remove(file_path_base) -# continue -# -# seq_path_base = os.path.split(seq_path)[1] -# head, tail = seq_path_base.split(self.sequence_splitter) -# -# final_col = None -# for collection in collections: -# if head != collection.head or tail != collection.tail: -# continue -# final_col = collection -# break -# -# if final_col is not None: -# # Fill full path to head -# final_col.head = os.path.join(dir_path, final_col.head) -# for _file_path in final_col: -# if os.path.exists(_file_path): -# -# size += os.path.getsize(_file_path) -# -# if delete: -# os.remove(_file_path) -# self.log.debug( -# "Removed file: {}".format(_file_path) -# ) -# -# _seq_path = final_col.format("{head}{padding}{tail}") -# self.log.debug("Removed files: {}".format(_seq_path)) -# collections.remove(final_col) -# -# elif os.path.exists(file_path): -# size += os.path.getsize(file_path) -# -# if delete: -# os.remove(file_path) -# self.log.debug("Removed file: {}".format(file_path)) -# else: -# self.log.debug( -# "File was not found: {}".format(file_path) -# ) -# -# # Delete as much as possible parent folders -# if not delete: -# return size -# -# for dir_path in dir_paths.values(): -# while True: -# if not os.path.exists(dir_path): -# dir_path = os.path.dirname(dir_path) -# continue -# -# if len(os.listdir(dir_path)) != 0: -# break -# -# self.log.debug("Removed folder: {}".format(dir_path)) -# os.rmdir(dir_path) -# -# return size -# -# def message(self, text): -# msgBox = QtWidgets.QMessageBox() -# msgBox.setText(text) -# msgBox.setStyleSheet(style.load_stylesheet()) -# msgBox.setWindowFlags( -# msgBox.windowFlags() | QtCore.Qt.FramelessWindowHint -# ) -# msgBox.exec_() -# -# def get_data(self, context, versions_count): -# product_entity = context["product"] -# folder_entity = context["folder"] -# project_name = context["project"]["name"] -# anatomy = Anatomy(project_name) -# -# versions = list(ayon_api.get_versions( -# project_name, product_ids=[product_entity["id"]] -# )) 
-# -# versions_by_parent = collections.defaultdict(list) -# for ent in versions: -# versions_by_parent[ent["productId"]].append(ent) -# -# def sort_func(ent): -# return int(ent["version"]) -# -# all_last_versions = [] -# for _parent_id, _versions in versions_by_parent.items(): -# for idx, version in enumerate( -# sorted(_versions, key=sort_func, reverse=True) -# ): -# if idx >= versions_count: -# break -# all_last_versions.append(version) -# -# self.log.debug("Collected versions ({})".format(len(versions))) -# -# # Filter latest versions -# for version in all_last_versions: -# versions.remove(version) -# -# # Update versions_by_parent without filtered versions -# versions_by_parent = collections.defaultdict(list) -# for ent in versions: -# versions_by_parent[ent["productId"]].append(ent) -# -# # Filter already deleted versions -# versions_to_pop = [] -# for version in versions: -# version_tags = version["data"].get("tags") -# if version_tags and "deleted" in version_tags: -# versions_to_pop.append(version) -# -# for version in versions_to_pop: -# msg = "Folder: \"{}\" | Product: \"{}\" | Version: \"{}\"".format( -# folder_entity["path"], -# product_entity["name"], -# version["version"] -# ) -# self.log.debug(( -# "Skipping version. Already tagged as `deleted`. < {} >" -# ).format(msg)) -# versions.remove(version) -# -# version_ids = [ent["id"] for ent in versions] -# -# self.log.debug( -# "Filtered versions to delete ({})".format(len(version_ids)) -# ) -# -# if not version_ids: -# msg = "Skipping processing. Nothing to delete on {}/{}".format( -# folder_entity["path"], product_entity["name"] -# ) -# self.log.info(msg) -# print(msg) -# return -# -# repres = list(ayon_api.get_representations( -# project_name, version_ids=version_ids -# )) -# -# self.log.debug( -# "Collected representations to remove ({})".format(len(repres)) -# ) -# -# dir_paths = {} -# file_paths_by_dir = collections.defaultdict(list) -# for repre in repres: -# file_path, seq_path = self.path_from_representation( -# repre, anatomy -# ) -# if file_path is None: -# self.log.debug(( -# "Could not format path for represenation \"{}\"" -# ).format(str(repre))) -# continue -# -# dir_path = os.path.dirname(file_path) -# dir_id = None -# for _dir_id, _dir_path in dir_paths.items(): -# if _dir_path == dir_path: -# dir_id = _dir_id -# break -# -# if dir_id is None: -# dir_id = uuid.uuid4() -# dir_paths[dir_id] = dir_path -# -# file_paths_by_dir[dir_id].append([file_path, seq_path]) -# -# dir_ids_to_pop = [] -# for dir_id, dir_path in dir_paths.items(): -# if os.path.exists(dir_path): -# continue -# -# dir_ids_to_pop.append(dir_id) -# -# # Pop dirs from both dictionaries -# for dir_id in dir_ids_to_pop: -# dir_paths.pop(dir_id) -# paths = file_paths_by_dir.pop(dir_id) -# # TODO report of missing directories? -# paths_msg = ", ".join([ -# "'{}'".format(path[0].replace("\\", "/")) for path in paths -# ]) -# self.log.debug(( -# "Folder does not exist. Deleting it's files skipped: {}" -# ).format(paths_msg)) -# -# return { -# "dir_paths": dir_paths, -# "file_paths_by_dir": file_paths_by_dir, -# "versions": versions, -# "folder": folder_entity, -# "product": product_entity, -# "archive_product": versions_count == 0 -# } -# -# def main(self, project_name, data, remove_publish_folder): -# # Size of files. 
-# size = 0 -# if not data: -# return size -# -# if remove_publish_folder: -# size = self.delete_whole_dir_paths(data["dir_paths"].values()) -# else: -# size = self.delete_only_repre_files( -# data["dir_paths"], data["file_paths_by_dir"] -# ) -# -# mongo_changes_bulk = [] -# for version in data["versions"]: -# orig_version_tags = version["data"].get("tags") or [] -# version_tags = [tag for tag in orig_version_tags] -# if "deleted" not in version_tags: -# version_tags.append("deleted") -# -# if version_tags == orig_version_tags: -# continue -# -# update_query = {"id": version["id"]} -# update_data = {"$set": {"data.tags": version_tags}} -# mongo_changes_bulk.append(UpdateOne(update_query, update_data)) -# -# if data["archive_product"]: -# mongo_changes_bulk.append(UpdateOne( -# { -# "id": data["product"]["id"], -# "type": "subset" -# }, -# {"$set": {"type": "archived_subset"}} -# )) -# -# if mongo_changes_bulk: -# dbcon = AvalonMongoDB() -# dbcon.Session["AYON_PROJECT_NAME"] = project_name -# dbcon.install() -# dbcon.bulk_write(mongo_changes_bulk) -# dbcon.uninstall() -# -# self._ftrack_delete_versions(data) -# -# return size -# -# def _ftrack_delete_versions(self, data): -# """Delete version on ftrack. -# -# Handling of ftrack logic in this plugin is not ideal. But in OP3 it is -# almost impossible to solve the issue other way. -# -# Note: -# Asset versions on ftrack are not deleted but marked as -# "not published" which cause that they're invisible. -# -# Args: -# data (dict): Data sent to product loader with full context. -# """ -# -# # First check for ftrack id on folder entity -# # - skip if ther is none -# ftrack_id = data["folder"]["attrib"].get("ftrackId") -# if not ftrack_id: -# self.log.info(( -# "Folder does not have filled ftrack id. Skipped delete" -# " of ftrack version." -# )) -# return -# -# # Check if ftrack module is enabled -# addons_manager = AddonsManager() -# ftrack_addon = addons_manager.get("ftrack") -# if not ftrack_addon or not ftrack_addon.enabled: -# return -# -# import ftrack_api -# -# session = ftrack_api.Session() -# product_name = data["product"]["name"] -# versions = { -# '"{}"'.format(version_doc["name"]) -# for version_doc in data["versions"] -# } -# asset_versions = session.query( -# ( -# "select id, is_published from AssetVersion where" -# " asset.parent.id is \"{}\"" -# " and asset.name is \"{}\"" -# " and version in ({})" -# ).format( -# ftrack_id, -# product_name, -# ",".join(versions) -# ) -# ).all() -# -# # Set attribute `is_published` to `False` on ftrack AssetVersions -# for asset_version in asset_versions: -# asset_version["is_published"] = False -# -# try: -# session.commit() -# -# except Exception: -# msg = ( -# "Could not set `is_published` attribute to `False`" -# " for selected AssetVersions." 
-# ) -# self.log.error(msg) -# self.message(msg) -# -# def load(self, contexts, name=None, namespace=None, options=None): -# try: -# size = 0 -# for count, context in enumerate(contexts): -# versions_to_keep = 2 -# remove_publish_folder = False -# if options: -# versions_to_keep = options.get( -# "versions_to_keep", versions_to_keep -# ) -# remove_publish_folder = options.get( -# "remove_publish_folder", remove_publish_folder -# ) -# -# data = self.get_data(context, versions_to_keep) -# if not data: -# continue -# -# project_name = context["project"]["name"] -# size += self.main(project_name, data, remove_publish_folder) -# print("Progressing {}/{}".format(count + 1, len(contexts))) -# -# msg = "Total size of files: {}".format(format_file_size(size)) -# self.log.info(msg) -# self.message(msg) -# -# except Exception: -# self.log.error("Failed to delete versions.", exc_info=True) -# -# -# class CalculateOldVersions(DeleteOldVersions): -# """Calculate file size of old versions""" -# label = "Calculate Old Versions" -# order = 30 -# tool_names = ["library_loader"] -# -# options = [ -# qargparse.Integer( -# "versions_to_keep", default=2, min=0, help="Versions to keep:" -# ), -# qargparse.Boolean( -# "remove_publish_folder", help="Remove publish folder:" -# ) -# ] -# -# def main(self, project_name, data, remove_publish_folder): -# size = 0 -# -# if not data: -# return size -# -# if remove_publish_folder: -# size = self.delete_whole_dir_paths( -# data["dir_paths"].values(), delete=False -# ) -# else: -# size = self.delete_only_repre_files( -# data["dir_paths"], data["file_paths_by_dir"], delete=False -# ) -# -# return size +import collections +import os +import uuid + +import clique +import ayon_api +from ayon_api.operations import OperationsSession +import qargparse +from qtpy import QtWidgets, QtCore + +from ayon_core import style +from ayon_core.lib import format_file_size +from ayon_core.pipeline import load, Anatomy +from ayon_core.pipeline.load import ( + get_representation_path_with_anatomy, + InvalidRepresentationContext, +) + + +class DeleteOldVersions(load.ProductLoaderPlugin): + """Deletes specific number of old version""" + + is_multiple_contexts_compatible = True + sequence_splitter = "__sequence_splitter__" + + representations = ["*"] + product_types = {"*"} + tool_names = ["library_loader"] + + label = "Delete Old Versions" + order = 35 + icon = "trash" + color = "#d8d8d8" + + options = [ + qargparse.Integer( + "versions_to_keep", default=2, min=0, help="Versions to keep:" + ), + qargparse.Boolean( + "remove_publish_folder", help="Remove publish folder:" + ) + ] + + def delete_whole_dir_paths(self, dir_paths, delete=True): + size = 0 + + for dir_path in dir_paths: + # Delete all files and fodlers in dir path + for root, dirs, files in os.walk(dir_path, topdown=False): + for name in files: + file_path = os.path.join(root, name) + size += os.path.getsize(file_path) + if delete: + os.remove(file_path) + self.log.debug("Removed file: {}".format(file_path)) + + for name in dirs: + if delete: + os.rmdir(os.path.join(root, name)) + + if not delete: + continue + + # Delete even the folder and it's parents folders if they are empty + while True: + if not os.path.exists(dir_path): + dir_path = os.path.dirname(dir_path) + continue + + if len(os.listdir(dir_path)) != 0: + break + + os.rmdir(os.path.join(dir_path)) + + return size + + def path_from_representation(self, representation, anatomy): + try: + context = representation["context"] + except KeyError: + return (None, None) + + try: + path 
= get_representation_path_with_anatomy( + representation, anatomy + ) + except InvalidRepresentationContext: + return (None, None) + + sequence_path = None + if "frame" in context: + context["frame"] = self.sequence_splitter + sequence_path = get_representation_path_with_anatomy( + representation, anatomy + ) + + if sequence_path: + sequence_path = sequence_path.normalized() + + return (path.normalized(), sequence_path) + + def delete_only_repre_files(self, dir_paths, file_paths, delete=True): + size = 0 + + for dir_id, dir_path in dir_paths.items(): + dir_files = os.listdir(dir_path) + collections, remainders = clique.assemble(dir_files) + for file_path, seq_path in file_paths[dir_id]: + file_path_base = os.path.split(file_path)[1] + # Just remove file if `frame` key was not in context or + # filled path is in remainders (single file sequence) + if not seq_path or file_path_base in remainders: + if not os.path.exists(file_path): + self.log.debug( + "File was not found: {}".format(file_path) + ) + continue + + size += os.path.getsize(file_path) + + if delete: + os.remove(file_path) + self.log.debug("Removed file: {}".format(file_path)) + + if file_path_base in remainders: + remainders.remove(file_path_base) + continue + + seq_path_base = os.path.split(seq_path)[1] + head, tail = seq_path_base.split(self.sequence_splitter) + + final_col = None + for collection in collections: + if head != collection.head or tail != collection.tail: + continue + final_col = collection + break + + if final_col is not None: + # Fill full path to head + final_col.head = os.path.join(dir_path, final_col.head) + for _file_path in final_col: + if os.path.exists(_file_path): + + size += os.path.getsize(_file_path) + + if delete: + os.remove(_file_path) + self.log.debug( + "Removed file: {}".format(_file_path) + ) + + _seq_path = final_col.format("{head}{padding}{tail}") + self.log.debug("Removed files: {}".format(_seq_path)) + collections.remove(final_col) + + elif os.path.exists(file_path): + size += os.path.getsize(file_path) + + if delete: + os.remove(file_path) + self.log.debug("Removed file: {}".format(file_path)) + else: + self.log.debug( + "File was not found: {}".format(file_path) + ) + + # Delete as much as possible parent folders + if not delete: + return size + + for dir_path in dir_paths.values(): + while True: + if not os.path.exists(dir_path): + dir_path = os.path.dirname(dir_path) + continue + + if len(os.listdir(dir_path)) != 0: + break + + self.log.debug("Removed folder: {}".format(dir_path)) + os.rmdir(dir_path) + + return size + + def message(self, text): + msgBox = QtWidgets.QMessageBox() + msgBox.setText(text) + msgBox.setStyleSheet(style.load_stylesheet()) + msgBox.setWindowFlags( + msgBox.windowFlags() | QtCore.Qt.FramelessWindowHint + ) + msgBox.exec_() + + def get_data(self, context, versions_count): + product_entity = context["product"] + folder_entity = context["folder"] + project_name = context["project"]["name"] + anatomy = Anatomy(project_name, project_entity=context["project"]) + + version_fields = ayon_api.get_default_fields_for_type("version") + version_fields.add("tags") + versions = list(ayon_api.get_versions( + project_name, + product_ids=[product_entity["id"]], + active=None, + hero=False, + fields=version_fields + )) + self.log.debug( + "Version Number ({})".format(len(versions)) + ) + versions_by_parent = collections.defaultdict(list) + for ent in versions: + versions_by_parent[ent["productId"]].append(ent) + + def sort_func(ent): + return int(ent["version"]) + + 
all_last_versions = [] + for _parent_id, _versions in versions_by_parent.items(): + for idx, version in enumerate( + sorted(_versions, key=sort_func, reverse=True) + ): + if idx >= versions_count: + break + all_last_versions.append(version) + + self.log.debug("Collected versions ({})".format(len(versions))) + + # Filter latest versions + for version in all_last_versions: + versions.remove(version) + + # Update versions_by_parent without filtered versions + versions_by_parent = collections.defaultdict(list) + for ent in versions: + versions_by_parent[ent["productId"]].append(ent) + + # Filter already deleted versions + versions_to_pop = [] + for version in versions: + if "deleted" in version["tags"]: + versions_to_pop.append(version) + + for version in versions_to_pop: + msg = "Folder: \"{}\" | Product: \"{}\" | Version: \"{}\"".format( + folder_entity["path"], + product_entity["name"], + version["version"] + ) + self.log.debug(( + "Skipping version. Already tagged as inactive. < {} >" + ).format(msg)) + versions.remove(version) + + version_ids = [ent["id"] for ent in versions] + + self.log.debug( + "Filtered versions to delete ({})".format(len(version_ids)) + ) + + if not version_ids: + msg = "Skipping processing. Nothing to delete on {}/{}".format( + folder_entity["path"], product_entity["name"] + ) + self.log.info(msg) + print(msg) + return + + repres = list(ayon_api.get_representations( + project_name, version_ids=version_ids + )) + + self.log.debug( + "Collected representations to remove ({})".format(len(repres)) + ) + + dir_paths = {} + file_paths_by_dir = collections.defaultdict(list) + for repre in repres: + file_path, seq_path = self.path_from_representation( + repre, anatomy + ) + if file_path is None: + self.log.debug(( + "Could not format path for represenation \"{}\"" + ).format(str(repre))) + continue + + dir_path = os.path.dirname(file_path) + dir_id = None + for _dir_id, _dir_path in dir_paths.items(): + if _dir_path == dir_path: + dir_id = _dir_id + break + + if dir_id is None: + dir_id = uuid.uuid4() + dir_paths[dir_id] = dir_path + + file_paths_by_dir[dir_id].append([file_path, seq_path]) + + dir_ids_to_pop = [] + for dir_id, dir_path in dir_paths.items(): + if os.path.exists(dir_path): + continue + + dir_ids_to_pop.append(dir_id) + + # Pop dirs from both dictionaries + for dir_id in dir_ids_to_pop: + dir_paths.pop(dir_id) + paths = file_paths_by_dir.pop(dir_id) + # TODO report of missing directories? + paths_msg = ", ".join([ + "'{}'".format(path[0].replace("\\", "/")) for path in paths + ]) + self.log.debug(( + "Folder does not exist. Deleting its files skipped: {}" + ).format(paths_msg)) + + return { + "dir_paths": dir_paths, + "file_paths_by_dir": file_paths_by_dir, + "versions": versions, + "folder": folder_entity, + "product": product_entity, + "archive_product": versions_count == 0 + } + + def main(self, project_name, data, remove_publish_folder): + # Size of files. 
+ size = 0 + if not data: + return size + + if remove_publish_folder: + size = self.delete_whole_dir_paths(data["dir_paths"].values()) + else: + size = self.delete_only_repre_files( + data["dir_paths"], data["file_paths_by_dir"] + ) + + op_session = OperationsSession() + for version in data["versions"]: + orig_version_tags = version["tags"] + version_tags = list(orig_version_tags) + changes = {} + if "deleted" not in version_tags: + version_tags.append("deleted") + changes["tags"] = version_tags + + if version["active"]: + changes["active"] = False + + if not changes: + continue + op_session.update_entity( + project_name, "version", version["id"], changes + ) + + op_session.commit() + + return size + + def load(self, contexts, name=None, namespace=None, options=None): + try: + size = 0 + for count, context in enumerate(contexts): + versions_to_keep = 2 + remove_publish_folder = False + if options: + versions_to_keep = options.get( + "versions_to_keep", versions_to_keep + ) + remove_publish_folder = options.get( + "remove_publish_folder", remove_publish_folder + ) + + data = self.get_data(context, versions_to_keep) + if not data: + continue + project_name = context["project"]["name"] + size += self.main(project_name, data, remove_publish_folder) + print("Progressing {}/{}".format(count + 1, len(contexts))) + + msg = "Total size of files: {}".format(format_file_size(size)) + self.log.info(msg) + self.message(msg) + + except Exception: + self.log.error("Failed to delete versions.", exc_info=True) + + +class CalculateOldVersions(DeleteOldVersions): + """Calculate file size of old versions""" + label = "Calculate Old Versions" + order = 30 + tool_names = ["library_loader"] + + options = [ + qargparse.Integer( + "versions_to_keep", default=2, min=0, help="Versions to keep:" + ), + qargparse.Boolean( + "remove_publish_folder", help="Remove publish folder:" + ) + ] + + def main(self, project_name, data, remove_publish_folder): + size = 0 + + if not data: + return size + + if remove_publish_folder: + size = self.delete_whole_dir_paths( + data["dir_paths"].values(), delete=False + ) + else: + size = self.delete_only_repre_files( + data["dir_paths"], data["file_paths_by_dir"], delete=False + ) + + return size diff --git a/client/ayon_core/plugins/publish/integrate.py b/client/ayon_core/plugins/publish/integrate.py index ce34f2e88b..764168edd3 100644 --- a/client/ayon_core/plugins/publish/integrate.py +++ b/client/ayon_core/plugins/publish/integrate.py @@ -167,7 +167,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "uasset", "blendScene", "yeticacheUE", - "tycache" + "tycache", + "csv_ingest_file", ] default_template_name = "publish" diff --git a/client/ayon_core/scripts/slates/slate_base/base.py b/client/ayon_core/scripts/slates/slate_base/base.py index 35ef46769c..e1648c916a 100644 --- a/client/ayon_core/scripts/slates/slate_base/base.py +++ b/client/ayon_core/scripts/slates/slate_base/base.py @@ -82,20 +82,6 @@ class BaseObj: def main_style(self): return load_default_style() - def height(self): - raise NotImplementedError( - "Attribute `height` is not implemented for <{}>".format( - self.__clas__.__name__ - ) - ) - - def width(self): - raise NotImplementedError( - "Attribute `width` is not implemented for <{}>".format( - self.__clas__.__name__ - ) - ) - def collect_data(self): return None diff --git a/client/ayon_core/tools/loader/ui/products_model.py b/client/ayon_core/tools/loader/ui/products_model.py index 41342ba0df..b465679c3b 100644 --- 
a/client/ayon_core/tools/loader/ui/products_model.py +++ b/client/ayon_core/tools/loader/ui/products_model.py @@ -284,7 +284,13 @@ class ProductsModel(QtGui.QStandardItemModel): model_item.setData(label, QtCore.Qt.DisplayRole) return model_item - def _set_version_data_to_product_item(self, model_item, version_item): + def _set_version_data_to_product_item( + self, + model_item, + version_item, + repre_count_by_version_id=None, + sync_availability_by_version_id=None, + ): """ Args: @@ -292,6 +298,10 @@ class ProductsModel(QtGui.QStandardItemModel): from version item. version_item (VersionItem): Item from entities model with information about version. + repre_count_by_version_id (Optional[str, int]): Mapping of + representation count by version id. + sync_availability_by_version_id (Optional[str, Tuple[int, int]]): + Mapping of sync availability by version id. """ model_item.setData(version_item.version_id, VERSION_ID_ROLE) @@ -312,12 +322,20 @@ class ProductsModel(QtGui.QStandardItemModel): # TODO call site sync methods for all versions at once project_name = self._last_project_name version_id = version_item.version_id - repre_count = self._controller.get_versions_representation_count( - project_name, [version_id] - )[version_id] - active, remote = self._controller.get_version_sync_availability( - project_name, [version_id] - )[version_id] + if repre_count_by_version_id is None: + repre_count_by_version_id = ( + self._controller.get_versions_representation_count( + project_name, [version_id] + ) + ) + if sync_availability_by_version_id is None: + sync_availability_by_version_id = ( + self._controller.get_version_sync_availability( + project_name, [version_id] + ) + ) + repre_count = repre_count_by_version_id[version_id] + active, remote = sync_availability_by_version_id[version_id] model_item.setData(repre_count, REPRESENTATIONS_COUNT_ROLE) model_item.setData(active, SYNC_ACTIVE_SITE_AVAILABILITY) @@ -327,7 +345,9 @@ class ProductsModel(QtGui.QStandardItemModel): self, product_item, active_site_icon, - remote_site_icon + remote_site_icon, + repre_count_by_version_id, + sync_availability_by_version_id, ): model_item = self._items_by_id.get(product_item.product_id) versions = list(product_item.version_items.values()) @@ -357,7 +377,12 @@ class ProductsModel(QtGui.QStandardItemModel): model_item.setData(active_site_icon, ACTIVE_SITE_ICON_ROLE) model_item.setData(remote_site_icon, REMOTE_SITE_ICON_ROLE) - self._set_version_data_to_product_item(model_item, last_version) + self._set_version_data_to_product_item( + model_item, + last_version, + repre_count_by_version_id, + sync_availability_by_version_id, + ) return model_item def get_last_project_name(self): @@ -387,6 +412,24 @@ class ProductsModel(QtGui.QStandardItemModel): product_item.product_id: product_item for product_item in product_items } + last_version_id_by_product_id = {} + for product_item in product_items: + versions = list(product_item.version_items.values()) + versions.sort() + last_version = versions[-1] + last_version_id_by_product_id[product_item.product_id] = ( + last_version.version_id + ) + + version_ids = set(last_version_id_by_product_id.values()) + repre_count_by_version_id = self._controller.get_versions_representation_count( + project_name, version_ids + ) + sync_availability_by_version_id = ( + self._controller.get_version_sync_availability( + project_name, version_ids + ) + ) # Prepare product groups product_name_matches_by_group = collections.defaultdict(dict) @@ -443,6 +486,8 @@ class 
ProductsModel(QtGui.QStandardItemModel): product_item, active_site_icon, remote_site_icon, + repre_count_by_version_id, + sync_availability_by_version_id, ) new_items.append(item) @@ -463,6 +508,8 @@ class ProductsModel(QtGui.QStandardItemModel): product_item, active_site_icon, remote_site_icon, + repre_count_by_version_id, + sync_availability_by_version_id, ) new_merged_items.append(item) merged_product_types.add(product_item.product_type) diff --git a/client/ayon_core/tools/publisher/control_qt.py b/client/ayon_core/tools/publisher/control_qt.py index ee08899cac..bef3a5af3b 100644 --- a/client/ayon_core/tools/publisher/control_qt.py +++ b/client/ayon_core/tools/publisher/control_qt.py @@ -343,8 +343,9 @@ class QtRemotePublishController(BasePublisherController): @abstractmethod def _send_instance_changes_to_client(self): - instance_changes = self._get_instance_changes_for_client() - # Implement to send 'instance_changes' value to client + # TODO Implement to send 'instance_changes' value to client + # instance_changes = self._get_instance_changes_for_client() + pass @abstractmethod def save_changes(self): diff --git a/client/ayon_core/tools/tray/tray.py b/client/ayon_core/tools/tray/tray.py index 3c6c529be8..957518afe4 100644 --- a/client/ayon_core/tools/tray/tray.py +++ b/client/ayon_core/tools/tray/tray.py @@ -552,7 +552,7 @@ class TrayStarter(QtCore.QObject): def main(): app = get_ayon_qt_app() - starter = TrayStarter(app) + starter = TrayStarter(app) # noqa F841 if not is_running_from_build() and os.name == "nt": import ctypes diff --git a/client/ayon_core/tools/utils/color_widgets/color_inputs.py b/client/ayon_core/tools/utils/color_widgets/color_inputs.py index 9c8e7b92e8..795b80fc1e 100644 --- a/client/ayon_core/tools/utils/color_widgets/color_inputs.py +++ b/client/ayon_core/tools/utils/color_widgets/color_inputs.py @@ -562,11 +562,11 @@ class HSLInputs(QtWidgets.QWidget): return self._block_changes = True - h, s, l, _ = self.color.getHsl() + hue, sat, lum, _ = self.color.getHsl() - self.input_hue.setValue(h) - self.input_sat.setValue(s) - self.input_light.setValue(l) + self.input_hue.setValue(hue) + self.input_sat.setValue(sat) + self.input_light.setValue(lum) self._block_changes = False diff --git a/client/ayon_core/tools/utils/widgets.py b/client/ayon_core/tools/utils/widgets.py index 1d4f85246f..21cab5d682 100644 --- a/client/ayon_core/tools/utils/widgets.py +++ b/client/ayon_core/tools/utils/widgets.py @@ -578,7 +578,8 @@ class OptionalAction(QtWidgets.QWidgetAction): def set_option_tip(self, options): sep = "\n\n" if not options or not isinstance(options[0], AbstractAttrDef): - mak = (lambda opt: opt["name"] + " :\n " + opt["help"]) + def mak(opt): + return opt["name"] + " :\n " + opt["help"] self.option_tip = sep.join(mak(opt) for opt in options) return diff --git a/client/ayon_core/tools/workfile_template_build/lib.py b/client/ayon_core/tools/workfile_template_build/lib.py index de3a0d0084..ffd6fefc38 100644 --- a/client/ayon_core/tools/workfile_template_build/lib.py +++ b/client/ayon_core/tools/workfile_template_build/lib.py @@ -8,12 +8,12 @@ from ayon_core.tools.utils.dialogs import show_message_dialog def open_template_ui(builder, main_window): """Open template from `builder` - Asks user about overwriting current scene and feedsback exceptions. + Asks user about overwriting current scene and feedback exceptions. """ result = QtWidgets.QMessageBox.question( main_window, "Opening template", - "Caution! 
You will loose unsaved changes.\nDo you want to continue?", + "Caution! You will lose unsaved changes.\nDo you want to continue?", QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No ) if result == QtWidgets.QMessageBox.Yes: diff --git a/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py b/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py index 6a1572deb2..fe6abee951 100644 --- a/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py +++ b/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py @@ -20,6 +20,8 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): controller (AbstractWorkfilesFrontend): The control object. """ + refreshed = QtCore.Signal() + def __init__(self, controller): super(WorkAreaFilesModel, self).__init__() @@ -163,6 +165,12 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): self._fill_items() def _fill_items(self): + try: + self._fill_items_impl() + finally: + self.refreshed.emit() + + def _fill_items_impl(self): folder_id = self._selected_folder_id task_id = self._selected_task_id if not folder_id or not task_id: @@ -285,6 +293,7 @@ class WorkAreaFilesWidget(QtWidgets.QWidget): selection_model.selectionChanged.connect(self._on_selection_change) view.double_clicked.connect(self._on_mouse_double_click) view.customContextMenuRequested.connect(self._on_context_menu) + model.refreshed.connect(self._on_model_refresh) controller.register_event_callback( "expected_selection_changed", @@ -298,6 +307,7 @@ class WorkAreaFilesWidget(QtWidgets.QWidget): self._controller = controller self._published_mode = False + self._change_selection_on_refresh = True def set_published_mode(self, published_mode): """Set the published mode. @@ -379,7 +389,9 @@ class WorkAreaFilesWidget(QtWidgets.QWidget): if not workfile_info["current"]: return + self._change_selection_on_refresh = False self._model.refresh() + self._change_selection_on_refresh = True workfile_name = workfile_info["name"] if ( @@ -394,3 +406,30 @@ class WorkAreaFilesWidget(QtWidgets.QWidget): self._controller.expected_workfile_selected( event["folder"]["id"], event["task"]["name"], workfile_name ) + + def _on_model_refresh(self): + if ( + not self._change_selection_on_refresh + or self._proxy_model.rowCount() < 1 + ): + return + + # Find the row with latest date modified + latest_index = max( + ( + self._proxy_model.index(idx, 0) + for idx in range(self._proxy_model.rowCount()) + ), + key=lambda model_index: model_index.data(DATE_MODIFIED_ROLE) + ) + + # Select row of latest modified + selection_model = self._view.selectionModel() + selection_model.select( + latest_index, + ( + QtCore.QItemSelectionModel.ClearAndSelect + | QtCore.QItemSelectionModel.Current + | QtCore.QItemSelectionModel.Rows + ) + ) diff --git a/client/ayon_core/tools/workfiles/widgets/window.py b/client/ayon_core/tools/workfiles/widgets/window.py index 8a2617d270..1cfae7ec90 100644 --- a/client/ayon_core/tools/workfiles/widgets/window.py +++ b/client/ayon_core/tools/workfiles/widgets/window.py @@ -118,11 +118,11 @@ class WorkfilesToolWindow(QtWidgets.QWidget): overlay_invalid_host = InvalidHostOverlay(self) overlay_invalid_host.setVisible(False) - first_show_timer = QtCore.QTimer() - first_show_timer.setSingleShot(True) - first_show_timer.setInterval(50) + show_timer = QtCore.QTimer() + show_timer.setSingleShot(True) + show_timer.setInterval(50) - first_show_timer.timeout.connect(self._on_first_show) + show_timer.timeout.connect(self._on_show) controller.register_event_callback( "save_as.finished", @@ 
-159,7 +159,7 @@ class WorkfilesToolWindow(QtWidgets.QWidget): self._tasks_widget = tasks_widget self._side_panel = side_panel - self._first_show_timer = first_show_timer + self._show_timer = show_timer self._post_init() @@ -287,9 +287,9 @@ class WorkfilesToolWindow(QtWidgets.QWidget): def showEvent(self, event): super(WorkfilesToolWindow, self).showEvent(event) + self._show_timer.start() if self._first_show: self._first_show = False - self._first_show_timer.start() self.setStyleSheet(style.load_stylesheet()) def keyPressEvent(self, event): @@ -303,9 +303,8 @@ class WorkfilesToolWindow(QtWidgets.QWidget): pass - def _on_first_show(self): - if not self._controller_refreshed: - self.refresh() + def _on_show(self): + self.refresh() def _on_file_text_filter_change(self, text): self._files_widget.set_text_filter(text) diff --git a/pyproject.toml b/pyproject.toml index dc8b312364..c1f6ddfb0b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,6 +77,20 @@ unfixable = [] # Allow unused variables when underscore-prefixed. dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" +exclude = [ + "client/ayon_core/hosts/unreal/integration/*", + "client/ayon_core/hosts/aftereffects/api/extension/js/libs/*", + "client/ayon_core/hosts/hiero/api/startup/*", + "client/ayon_core/modules/deadline/repository/custom/plugins/CelAction/*", + "client/ayon_core/modules/deadline/repository/custom/plugins/HarmonyAYON/*", + "client/ayon_core/modules/click_wrap.py", + "client/ayon_core/scripts/slates/__init__.py" +] + +[tool.ruff.lint.per-file-ignores] +"client/ayon_core/lib/__init__.py" = ["E402"] +"client/ayon_core/hosts/max/startup/startup.py" = ["E402"] + [tool.ruff.format] # Like Black, use double quotes for strings. quote-style = "double" diff --git a/server_addon/aftereffects/package.py b/server_addon/aftereffects/package.py new file mode 100644 index 0000000000..a680b37602 --- /dev/null +++ b/server_addon/aftereffects/package.py @@ -0,0 +1,3 @@ +name = "aftereffects" +title = "AfterEffects" +version = "0.1.3" diff --git a/server_addon/aftereffects/server/__init__.py b/server_addon/aftereffects/server/__init__.py index e14e76e9db..76e6d5b2eb 100644 --- a/server_addon/aftereffects/server/__init__.py +++ b/server_addon/aftereffects/server/__init__.py @@ -1,14 +1,9 @@ from ayon_server.addons import BaseServerAddon from .settings import AfterEffectsSettings, DEFAULT_AFTEREFFECTS_SETTING -from .version import __version__ class AfterEffects(BaseServerAddon): - name = "aftereffects" - title = "AfterEffects" - version = __version__ - settings_model = AfterEffectsSettings async def get_default_settings(self): diff --git a/server_addon/aftereffects/server/version.py b/server_addon/aftereffects/server/version.py deleted file mode 100644 index e57ad00718..0000000000 --- a/server_addon/aftereffects/server/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring addon version.""" -__version__ = "0.1.3" diff --git a/client/ayon_core/addons/applications/ayon_applications/__init__.py b/server_addon/applications/client/ayon_applications/__init__.py similarity index 97% rename from client/ayon_core/addons/applications/ayon_applications/__init__.py rename to server_addon/applications/client/ayon_applications/__init__.py index b4a50279ab..c9b72f9914 100644 --- a/client/ayon_core/addons/applications/ayon_applications/__init__.py +++ b/server_addon/applications/client/ayon_applications/__init__.py @@ -31,6 +31,7 @@ from .addon import ApplicationsAddon __all__ = ( + "APPLICATIONS_ADDON_ROOT", 
"DEFAULT_ENV_SUBGROUP", "PLATFORM_NAMES", diff --git a/client/ayon_core/addons/applications/ayon_applications/addon.py b/server_addon/applications/client/ayon_applications/addon.py similarity index 100% rename from client/ayon_core/addons/applications/ayon_applications/addon.py rename to server_addon/applications/client/ayon_applications/addon.py diff --git a/client/ayon_core/addons/applications/ayon_applications/constants.py b/server_addon/applications/client/ayon_applications/constants.py similarity index 100% rename from client/ayon_core/addons/applications/ayon_applications/constants.py rename to server_addon/applications/client/ayon_applications/constants.py diff --git a/client/ayon_core/addons/applications/ayon_applications/defs.py b/server_addon/applications/client/ayon_applications/defs.py similarity index 100% rename from client/ayon_core/addons/applications/ayon_applications/defs.py rename to server_addon/applications/client/ayon_applications/defs.py diff --git a/client/ayon_core/addons/applications/ayon_applications/exceptions.py b/server_addon/applications/client/ayon_applications/exceptions.py similarity index 100% rename from client/ayon_core/addons/applications/ayon_applications/exceptions.py rename to server_addon/applications/client/ayon_applications/exceptions.py diff --git a/client/ayon_core/addons/applications/ayon_applications/hooks.py b/server_addon/applications/client/ayon_applications/hooks.py similarity index 100% rename from client/ayon_core/addons/applications/ayon_applications/hooks.py rename to server_addon/applications/client/ayon_applications/hooks.py diff --git a/client/ayon_core/addons/applications/ayon_applications/manager.py b/server_addon/applications/client/ayon_applications/manager.py similarity index 100% rename from client/ayon_core/addons/applications/ayon_applications/manager.py rename to server_addon/applications/client/ayon_applications/manager.py diff --git a/client/ayon_core/addons/applications/ayon_applications/plugins/publish/collect_app_name.py b/server_addon/applications/client/ayon_applications/plugins/publish/collect_app_name.py similarity index 100% rename from client/ayon_core/addons/applications/ayon_applications/plugins/publish/collect_app_name.py rename to server_addon/applications/client/ayon_applications/plugins/publish/collect_app_name.py diff --git a/client/ayon_core/addons/applications/ayon_applications/utils.py b/server_addon/applications/client/ayon_applications/utils.py similarity index 100% rename from client/ayon_core/addons/applications/ayon_applications/utils.py rename to server_addon/applications/client/ayon_applications/utils.py diff --git a/server_addon/applications/package.py b/server_addon/applications/package.py new file mode 100644 index 0000000000..43a301b7c2 --- /dev/null +++ b/server_addon/applications/package.py @@ -0,0 +1,10 @@ +name = "applications" +title = "Applications" +version = "0.2.0" + +ayon_server_version = ">=1.0.7" +ayon_launcher_version = ">=1.0.2" +ayon_required_addons = { + "core": ">0.3.0", +} +ayon_compatible_addons = {} diff --git a/server_addon/applications/server/__init__.py b/server_addon/applications/server/__init__.py index 2668589cbe..d85678b77b 100644 --- a/server_addon/applications/server/__init__.py +++ b/server_addon/applications/server/__init__.py @@ -6,7 +6,6 @@ from ayon_server.addons import BaseServerAddon, AddonLibrary from ayon_server.entities.core import attribute_library from ayon_server.lib.postgres import Postgres -from .version import __version__ from .settings import 
ApplicationsAddonSettings, DEFAULT_VALUES try: @@ -87,9 +86,6 @@ def get_enum_items_from_groups(groups): class ApplicationsAddon(BaseServerAddon): - name = "applications" - title = "Applications" - version = __version__ settings_model = ApplicationsAddonSettings async def get_default_settings(self): diff --git a/server_addon/applications/server/version.py b/server_addon/applications/server/version.py deleted file mode 100644 index c11f861afb..0000000000 --- a/server_addon/applications/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.9" diff --git a/server_addon/blender/package.py b/server_addon/blender/package.py new file mode 100644 index 0000000000..667076e533 --- /dev/null +++ b/server_addon/blender/package.py @@ -0,0 +1,3 @@ +name = "blender" +title = "Blender" +version = "0.1.8" diff --git a/server_addon/blender/server/__init__.py b/server_addon/blender/server/__init__.py index a7d6cb4400..b274e3bc29 100644 --- a/server_addon/blender/server/__init__.py +++ b/server_addon/blender/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import BlenderSettings, DEFAULT_VALUES class BlenderAddon(BaseServerAddon): - name = "blender" - title = "Blender" - version = __version__ settings_model: Type[BlenderSettings] = BlenderSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/blender/server/version.py b/server_addon/blender/server/version.py deleted file mode 100644 index 9cb17e7976..0000000000 --- a/server_addon/blender/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.8" diff --git a/server_addon/celaction/package.py b/server_addon/celaction/package.py new file mode 100644 index 0000000000..2b11a8630f --- /dev/null +++ b/server_addon/celaction/package.py @@ -0,0 +1,3 @@ +name = "celaction" +title = "CelAction" +version = "0.1.0" diff --git a/server_addon/celaction/server/__init__.py b/server_addon/celaction/server/__init__.py index 90d3dbaa01..e3769a4b7f 100644 --- a/server_addon/celaction/server/__init__.py +++ b/server_addon/celaction/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import CelActionSettings, DEFAULT_VALUES class CelActionAddon(BaseServerAddon): - name = "celaction" - title = "CelAction" - version = __version__ settings_model: Type[CelActionSettings] = CelActionSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/celaction/server/version.py b/server_addon/celaction/server/version.py deleted file mode 100644 index 3dc1f76bc6..0000000000 --- a/server_addon/celaction/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.0" diff --git a/server_addon/clockify/package.py b/server_addon/clockify/package.py new file mode 100644 index 0000000000..bcf9425b3f --- /dev/null +++ b/server_addon/clockify/package.py @@ -0,0 +1,3 @@ +name = "clockify" +title = "Clockify" +version = "0.1.1" diff --git a/server_addon/clockify/server/__init__.py b/server_addon/clockify/server/__init__.py index 0fa453fdf4..11bbfed261 100644 --- a/server_addon/clockify/server/__init__.py +++ b/server_addon/clockify/server/__init__.py @@ -2,14 +2,8 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ 
from .settings import ClockifySettings class ClockifyAddon(BaseServerAddon): - name = "clockify" - title = "Clockify" - version = __version__ settings_model: Type[ClockifySettings] = ClockifySettings - frontend_scopes = {} - services = {} diff --git a/server_addon/clockify/server/version.py b/server_addon/clockify/server/version.py deleted file mode 100644 index 485f44ac21..0000000000 --- a/server_addon/clockify/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.1" diff --git a/server_addon/create_ayon_addons.py b/server_addon/create_ayon_addons.py index c2686199be..f0a36d4740 100644 --- a/server_addon/create_ayon_addons.py +++ b/server_addon/create_ayon_addons.py @@ -4,6 +4,8 @@ import re import shutil import argparse import zipfile +import types +import importlib.machinery import platform import collections from pathlib import Path @@ -44,6 +46,11 @@ version = "{addon_version}" plugin_for = ["ayon_server"] """ +CLIENT_VERSION_CONTENT = '''# -*- coding: utf-8 -*- +"""Package declaring AYON core addon version.""" +__version__ = "{}" +''' + class ZipFileLongPaths(zipfile.ZipFile): """Allows longer paths in zip files. @@ -175,13 +182,71 @@ def create_addon_zip( shutil.rmtree(str(output_dir / addon_name)) +def prepare_client_code( + addon_dir: Path, + addon_output_dir: Path, + addon_version: str +): + client_dir = addon_dir / "client" + if not client_dir.exists(): + return + + # Prepare private dir in output + private_dir = addon_output_dir / "private" + private_dir.mkdir(parents=True, exist_ok=True) + + # Copy pyproject toml if available + pyproject_toml = client_dir / "pyproject.toml" + if pyproject_toml.exists(): + shutil.copy(pyproject_toml, private_dir) + + for subpath in client_dir.iterdir(): + if subpath.name == "pyproject.toml": + continue + + if subpath.is_file(): + continue + + # Update version.py with server version if 'version.py' is available + version_path = subpath / "version.py" + if version_path.exists(): + with open(version_path, "w") as stream: + stream.write(CLIENT_VERSION_CONTENT.format(addon_version)) + + zip_filepath = private_dir / "client.zip" + with ZipFileLongPaths(zip_filepath, "w", zipfile.ZIP_DEFLATED) as zipf: + # Add client code content to zip + for path, sub_path in find_files_in_subdir(str(subpath)): + sub_path = os.path.join(subpath.name, sub_path) + zipf.write(path, sub_path) + + +def import_filepath(path: Path, module_name: Optional[str] = None): + if not module_name: + module_name = os.path.splitext(path.name)[0] + + # Convert to string + path = str(path) + module = types.ModuleType(module_name) + module.__file__ = path + + # Use loader so module has full specs + module_loader = importlib.machinery.SourceFileLoader( + module_name, path + ) + module_loader.exec_module(module) + return module + + def create_addon_package( addon_dir: Path, output_dir: Path, create_zip: bool, keep_source: bool, ): - addon_version = get_addon_version(addon_dir) + src_package_py = addon_dir / "package.py" + package = import_filepath(src_package_py) + addon_version = package.version addon_output_dir = output_dir / addon_dir.name / addon_version if addon_output_dir.exists(): @@ -189,22 +254,16 @@ def create_addon_package( addon_output_dir.mkdir(parents=True) # Copy server content - package_py = addon_output_dir / "package.py" - addon_name = addon_dir.name - if addon_name == "royal_render": - addon_name = "royalrender" - package_py_content = PACKAGE_PY_TEMPLATE.format( - addon_name=addon_name, addon_version=addon_version - ) - - with open(package_py, "w+") as 
pkg_py: - pkg_py.write(package_py_content) + dst_package_py = addon_output_dir / "package.py" + shutil.copy(src_package_py, dst_package_py) server_dir = addon_dir / "server" shutil.copytree( server_dir, addon_output_dir / "server", dirs_exist_ok=True ) + prepare_client_code(addon_dir, addon_output_dir, addon_version) + if create_zip: create_addon_zip( output_dir, addon_dir.name, addon_version, keep_source diff --git a/server_addon/deadline/package.py b/server_addon/deadline/package.py new file mode 100644 index 0000000000..944797fea6 --- /dev/null +++ b/server_addon/deadline/package.py @@ -0,0 +1,3 @@ +name = "deadline" +title = "Deadline" +version = "0.1.10" diff --git a/server_addon/deadline/server/__init__.py b/server_addon/deadline/server/__init__.py index 36d04189a9..e7dcb7d347 100644 --- a/server_addon/deadline/server/__init__.py +++ b/server_addon/deadline/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import DeadlineSettings, DEFAULT_VALUES class Deadline(BaseServerAddon): - name = "deadline" - title = "Deadline" - version = __version__ settings_model: Type[DeadlineSettings] = DeadlineSettings async def get_default_settings(self): diff --git a/server_addon/deadline/server/settings/main.py b/server_addon/deadline/server/settings/main.py index 9537d6d550..21a314cd2f 100644 --- a/server_addon/deadline/server/settings/main.py +++ b/server_addon/deadline/server/settings/main.py @@ -1,3 +1,4 @@ +from typing import TYPE_CHECKING from pydantic import validator from ayon_server.settings import ( @@ -5,6 +6,8 @@ from ayon_server.settings import ( SettingsField, ensure_unique_names, ) +if TYPE_CHECKING: + from ayon_server.addons import BaseServerAddon from .publish_plugins import ( PublishPluginsModel, diff --git a/server_addon/deadline/server/version.py b/server_addon/deadline/server/version.py deleted file mode 100644 index 569b1212f7..0000000000 --- a/server_addon/deadline/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.10" diff --git a/server_addon/flame/package.py b/server_addon/flame/package.py new file mode 100644 index 0000000000..8c077ed91d --- /dev/null +++ b/server_addon/flame/package.py @@ -0,0 +1,3 @@ +name = "flame" +title = "Flame" +version = "0.1.0" diff --git a/server_addon/flame/server/__init__.py b/server_addon/flame/server/__init__.py index 7d5eb3960f..4aa46617ee 100644 --- a/server_addon/flame/server/__init__.py +++ b/server_addon/flame/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import FlameSettings, DEFAULT_VALUES class FlameAddon(BaseServerAddon): - name = "flame" - title = "Flame" - version = __version__ settings_model: Type[FlameSettings] = FlameSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/flame/server/version.py b/server_addon/flame/server/version.py deleted file mode 100644 index 3dc1f76bc6..0000000000 --- a/server_addon/flame/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.0" diff --git a/server_addon/fusion/package.py b/server_addon/fusion/package.py new file mode 100644 index 0000000000..9e7a46df2c --- /dev/null +++ b/server_addon/fusion/package.py @@ -0,0 +1,3 @@ +name = "fusion" +title = "Fusion" +version = "0.1.5" diff --git a/server_addon/fusion/server/__init__.py 
b/server_addon/fusion/server/__init__.py index 4d43f28812..0456cfd5ee 100644 --- a/server_addon/fusion/server/__init__.py +++ b/server_addon/fusion/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import FusionSettings, DEFAULT_VALUES class FusionAddon(BaseServerAddon): - name = "fusion" - title = "Fusion" - version = __version__ settings_model: Type[FusionSettings] = FusionSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/fusion/server/version.py b/server_addon/fusion/server/version.py deleted file mode 100644 index 1276d0254f..0000000000 --- a/server_addon/fusion/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.5" diff --git a/server_addon/harmony/package.py b/server_addon/harmony/package.py new file mode 100644 index 0000000000..83e88e7d57 --- /dev/null +++ b/server_addon/harmony/package.py @@ -0,0 +1,3 @@ +name = "harmony" +title = "Harmony" +version = "0.1.2" diff --git a/server_addon/harmony/server/__init__.py b/server_addon/harmony/server/__init__.py index 4ecda1989e..154618241e 100644 --- a/server_addon/harmony/server/__init__.py +++ b/server_addon/harmony/server/__init__.py @@ -1,14 +1,9 @@ from ayon_server.addons import BaseServerAddon from .settings import HarmonySettings, DEFAULT_HARMONY_SETTING -from .version import __version__ class Harmony(BaseServerAddon): - name = "harmony" - title = "Harmony" - version = __version__ - settings_model = HarmonySettings async def get_default_settings(self): diff --git a/server_addon/harmony/server/version.py b/server_addon/harmony/server/version.py deleted file mode 100644 index df0c92f1e2..0000000000 --- a/server_addon/harmony/server/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring addon version.""" -__version__ = "0.1.2" diff --git a/server_addon/hiero/package.py b/server_addon/hiero/package.py new file mode 100644 index 0000000000..54c2f74fa7 --- /dev/null +++ b/server_addon/hiero/package.py @@ -0,0 +1,3 @@ +name = "hiero" +title = "Hiero" +version = "0.1.3" diff --git a/server_addon/hiero/server/__init__.py b/server_addon/hiero/server/__init__.py index d0f9bcefc3..3db78eafd7 100644 --- a/server_addon/hiero/server/__init__.py +++ b/server_addon/hiero/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import HieroSettings, DEFAULT_VALUES class HieroAddon(BaseServerAddon): - name = "hiero" - title = "Hiero" - version = __version__ settings_model: Type[HieroSettings] = HieroSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/hiero/server/version.py b/server_addon/hiero/server/version.py deleted file mode 100644 index ae7362549b..0000000000 --- a/server_addon/hiero/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.3" diff --git a/server_addon/houdini/package.py b/server_addon/houdini/package.py new file mode 100644 index 0000000000..4e441c76ae --- /dev/null +++ b/server_addon/houdini/package.py @@ -0,0 +1,3 @@ +name = "houdini" +title = "Houdini" +version = "0.2.13" diff --git a/server_addon/houdini/server/__init__.py b/server_addon/houdini/server/__init__.py index 870ec2d0b7..8c1ffcb0b3 100644 --- a/server_addon/houdini/server/__init__.py +++ 
b/server_addon/houdini/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import HoudiniSettings, DEFAULT_VALUES class Houdini(BaseServerAddon): - name = "houdini" - title = "Houdini" - version = __version__ settings_model: Type[HoudiniSettings] = HoudiniSettings async def get_default_settings(self): diff --git a/server_addon/houdini/server/settings/imageio.py b/server_addon/houdini/server/settings/imageio.py index f4850c5df7..c4f4813d51 100644 --- a/server_addon/houdini/server/settings/imageio.py +++ b/server_addon/houdini/server/settings/imageio.py @@ -34,6 +34,34 @@ class ImageIOFileRulesModel(BaseSettingsModel): return value +class WorkfileImageIOModel(BaseSettingsModel): + """Workfile settings help. + + Empty values will be skipped, allowing any existing env vars to + pass through as defined. + + Note: The render space in Houdini is + always set to the 'scene_linear' role.""" + + enabled: bool = SettingsField(False, title="Enabled") + default_display: str = SettingsField( + title="Default active displays", + description="It behaves like the 'OCIO_ACTIVE_DISPLAYS' env var," + " Colon-separated list of displays, e.g ACES:P3" + ) + default_view: str = SettingsField( + title="Default active views", + description="It behaves like the 'OCIO_ACTIVE_VIEWS' env var," + " Colon-separated list of views, e.g sRGB:DCDM" + ) + review_color_space: str = SettingsField( + title="Review colorspace", + description="It exposes OCIO Colorspace parameter in opengl nodes." + "if left empty, Ayon will figure out the default " + "colorspace using your default display and default view." + ) + + class HoudiniImageIOModel(BaseSettingsModel): activate_host_color_management: bool = SettingsField( True, title="Enable Color Management" @@ -46,3 +74,26 @@ class HoudiniImageIOModel(BaseSettingsModel): default_factory=ImageIOFileRulesModel, title="File Rules" ) + workfile: WorkfileImageIOModel = SettingsField( + default_factory=WorkfileImageIOModel, + title="Workfile" + ) + + +DEFAULT_IMAGEIO_SETTINGS = { + "activate_host_color_management": False, + "ocio_config": { + "override_global_config": False, + "filepath": [] + }, + "file_rules": { + "activate_host_rules": False, + "rules": [] + }, + "workfile": { + "enabled": False, + "default_display": "ACES", + "default_view": "sRGB", + "review_color_space": "" + } +} diff --git a/server_addon/houdini/server/settings/main.py b/server_addon/houdini/server/settings/main.py index cbb19d15b7..3acab0ce74 100644 --- a/server_addon/houdini/server/settings/main.py +++ b/server_addon/houdini/server/settings/main.py @@ -3,7 +3,10 @@ from .general import ( GeneralSettingsModel, DEFAULT_GENERAL_SETTINGS ) -from .imageio import HoudiniImageIOModel +from .imageio import ( + HoudiniImageIOModel, + DEFAULT_IMAGEIO_SETTINGS +) from .shelves import ShelvesModel from .create import ( CreatePluginsModel, @@ -40,6 +43,7 @@ class HoudiniSettings(BaseSettingsModel): DEFAULT_VALUES = { "general": DEFAULT_GENERAL_SETTINGS, + "imageio": DEFAULT_IMAGEIO_SETTINGS, "shelves": [], "create": DEFAULT_HOUDINI_CREATE_SETTINGS, "publish": DEFAULT_HOUDINI_PUBLISH_SETTINGS diff --git a/server_addon/houdini/server/version.py b/server_addon/houdini/server/version.py deleted file mode 100644 index b5c9b6cb71..0000000000 --- a/server_addon/houdini/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.2.12" diff --git a/server_addon/max/package.py b/server_addon/max/package.py new file mode 100644 
index 0000000000..fb1f1b3050 --- /dev/null +++ b/server_addon/max/package.py @@ -0,0 +1,3 @@ +name = "max" +title = "Max" +version = "0.1.7" diff --git a/server_addon/max/server/__init__.py b/server_addon/max/server/__init__.py index 31c694a084..d03b29d249 100644 --- a/server_addon/max/server/__init__.py +++ b/server_addon/max/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import MaxSettings, DEFAULT_VALUES class MaxAddon(BaseServerAddon): - name = "max" - title = "Max" - version = __version__ settings_model: Type[MaxSettings] = MaxSettings async def get_default_settings(self): diff --git a/server_addon/max/server/version.py b/server_addon/max/server/version.py deleted file mode 100644 index f1380eede2..0000000000 --- a/server_addon/max/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.7" diff --git a/server_addon/maya/package.py b/server_addon/maya/package.py new file mode 100644 index 0000000000..00f28d901e --- /dev/null +++ b/server_addon/maya/package.py @@ -0,0 +1,3 @@ +name = "maya" +title = "Maya" +version = "0.1.16" diff --git a/server_addon/maya/server/__init__.py b/server_addon/maya/server/__init__.py index 8784427dcf..6dda2cdd77 100644 --- a/server_addon/maya/server/__init__.py +++ b/server_addon/maya/server/__init__.py @@ -2,13 +2,9 @@ from ayon_server.addons import BaseServerAddon from .settings.main import MayaSettings, DEFAULT_MAYA_SETTING -from .version import __version__ class MayaAddon(BaseServerAddon): - name = "maya" - title = "Maya" - version = __version__ settings_model = MayaSettings async def get_default_settings(self): diff --git a/server_addon/maya/server/settings/loaders.py b/server_addon/maya/server/settings/loaders.py index 4e949f616a..2f104d2858 100644 --- a/server_addon/maya/server/settings/loaders.py +++ b/server_addon/maya/server/settings/loaders.py @@ -1,5 +1,5 @@ from ayon_server.settings import BaseSettingsModel, SettingsField -from ayon_server.types import ColorRGB_float, ColorRGBA_uint8 +from ayon_server.types import ColorRGBA_uint8 class LoaderEnabledModel(BaseSettingsModel): @@ -103,6 +103,17 @@ class ImportLoaderModel(BaseSettingsModel): group_name: str = SettingsField(title="Group name") +class YetiRigLoaderModel(LoaderEnabledModel): + create_cache_instance_on_load: bool = SettingsField( + title="Create Yeti Cache instance on load", + description=( + "When enabled, upon loading a Yeti Rig product a new Yeti cache " + "instance is automatically created as preparation to publishing " + "the output directly." 
+ ) + ) + + class LoadersModel(BaseSettingsModel): colors: ColorsSetting = SettingsField( default_factory=ColorsSetting, @@ -195,8 +206,8 @@ class LoadersModel(BaseSettingsModel): default_factory=LoaderEnabledModel, title="Yeti Cache Loader" ) - YetiRigLoader: LoaderEnabledModel = SettingsField( - default_factory=LoaderEnabledModel, + YetiRigLoader: YetiRigLoaderModel = SettingsField( + default_factory=YetiRigLoaderModel, title="Yeti Rig Loader" ) @@ -266,5 +277,8 @@ DEFAULT_LOADERS_SETTING = { "VRaySceneLoader": {"enabled": True}, "XgenLoader": {"enabled": True}, "YetiCacheLoader": {"enabled": True}, - "YetiRigLoader": {"enabled": True}, + "YetiRigLoader": { + "enabled": True, + "create_cache_instance_on_load": True + }, } diff --git a/server_addon/maya/server/settings/publish_playblast.py b/server_addon/maya/server/settings/publish_playblast.py index 39f48bacbe..d513a43e99 100644 --- a/server_addon/maya/server/settings/publish_playblast.py +++ b/server_addon/maya/server/settings/publish_playblast.py @@ -6,7 +6,7 @@ from ayon_server.settings import ( ensure_unique_names, task_types_enum, ) -from ayon_server.types import ColorRGBA_uint8, ColorRGB_float +from ayon_server.types import ColorRGBA_uint8 def hardware_falloff_enum(): diff --git a/server_addon/maya/server/version.py b/server_addon/maya/server/version.py deleted file mode 100644 index 75b463f198..0000000000 --- a/server_addon/maya/server/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring addon version.""" -__version__ = "0.1.15" diff --git a/server_addon/nuke/package.py b/server_addon/nuke/package.py new file mode 100644 index 0000000000..bf03c4e7e7 --- /dev/null +++ b/server_addon/nuke/package.py @@ -0,0 +1,3 @@ +name = "nuke" +title = "Nuke" +version = "0.1.11" diff --git a/server_addon/nuke/server/__init__.py b/server_addon/nuke/server/__init__.py index 032ceea5fb..aeb5e36675 100644 --- a/server_addon/nuke/server/__init__.py +++ b/server_addon/nuke/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import NukeSettings, DEFAULT_VALUES class NukeAddon(BaseServerAddon): - name = "nuke" - title = "Nuke" - version = __version__ settings_model: Type[NukeSettings] = NukeSettings async def get_default_settings(self): diff --git a/server_addon/nuke/server/version.py b/server_addon/nuke/server/version.py deleted file mode 100644 index 0c5c30071a..0000000000 --- a/server_addon/nuke/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.11" diff --git a/server_addon/photoshop/package.py b/server_addon/photoshop/package.py new file mode 100644 index 0000000000..25615529d1 --- /dev/null +++ b/server_addon/photoshop/package.py @@ -0,0 +1,3 @@ +name = "photoshop" +title = "Photoshop" +version = "0.1.2" diff --git a/server_addon/photoshop/server/__init__.py b/server_addon/photoshop/server/__init__.py index 3a45f7a809..86d1025a2d 100644 --- a/server_addon/photoshop/server/__init__.py +++ b/server_addon/photoshop/server/__init__.py @@ -1,14 +1,9 @@ from ayon_server.addons import BaseServerAddon from .settings import PhotoshopSettings, DEFAULT_PHOTOSHOP_SETTING -from .version import __version__ class Photoshop(BaseServerAddon): - name = "photoshop" - title = "Photoshop" - version = __version__ - settings_model = PhotoshopSettings async def get_default_settings(self): diff --git a/server_addon/photoshop/server/version.py b/server_addon/photoshop/server/version.py deleted file mode 100644 index 
df0c92f1e2..0000000000 --- a/server_addon/photoshop/server/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring addon version.""" -__version__ = "0.1.2" diff --git a/server_addon/resolve/package.py b/server_addon/resolve/package.py new file mode 100644 index 0000000000..cf92413bce --- /dev/null +++ b/server_addon/resolve/package.py @@ -0,0 +1,3 @@ +name = "resolve" +title = "DaVinci Resolve" +version = "0.1.0" diff --git a/server_addon/resolve/server/__init__.py b/server_addon/resolve/server/__init__.py index a84180d0f5..35d2db19e4 100644 --- a/server_addon/resolve/server/__init__.py +++ b/server_addon/resolve/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import ResolveSettings, DEFAULT_VALUES class ResolveAddon(BaseServerAddon): - name = "resolve" - title = "DaVinci Resolve" - version = __version__ settings_model: Type[ResolveSettings] = ResolveSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/resolve/server/version.py b/server_addon/resolve/server/version.py deleted file mode 100644 index 3dc1f76bc6..0000000000 --- a/server_addon/resolve/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.0" diff --git a/server_addon/royal_render/server/version.py b/server_addon/royal_render/server/version.py deleted file mode 100644 index 485f44ac21..0000000000 --- a/server_addon/royal_render/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.1" diff --git a/server_addon/royalrender/package.py b/server_addon/royalrender/package.py new file mode 100644 index 0000000000..1fdea4abbb --- /dev/null +++ b/server_addon/royalrender/package.py @@ -0,0 +1,3 @@ +name = "royalrender" +title = "Royal Render" +version = "0.1.1" diff --git a/server_addon/royal_render/server/__init__.py b/server_addon/royalrender/server/__init__.py similarity index 77% rename from server_addon/royal_render/server/__init__.py rename to server_addon/royalrender/server/__init__.py index c5f0aafa00..5b10678136 100644 --- a/server_addon/royal_render/server/__init__.py +++ b/server_addon/royalrender/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import RoyalRenderSettings, DEFAULT_VALUES class RoyalRenderAddon(BaseServerAddon): - name = "royalrender" - version = __version__ - title = "Royal Render" settings_model: Type[RoyalRenderSettings] = RoyalRenderSettings async def get_default_settings(self): diff --git a/server_addon/royal_render/server/settings.py b/server_addon/royalrender/server/settings.py similarity index 100% rename from server_addon/royal_render/server/settings.py rename to server_addon/royalrender/server/settings.py diff --git a/server_addon/substancepainter/package.py b/server_addon/substancepainter/package.py new file mode 100644 index 0000000000..d445b0059f --- /dev/null +++ b/server_addon/substancepainter/package.py @@ -0,0 +1,3 @@ +name = "substancepainter" +title = "Substance Painter" +version = "0.1.1" diff --git a/server_addon/substancepainter/server/__init__.py b/server_addon/substancepainter/server/__init__.py index 2bf808d508..f6cd51e610 100644 --- a/server_addon/substancepainter/server/__init__.py +++ b/server_addon/substancepainter/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import 
BaseServerAddon -from .version import __version__ from .settings import SubstancePainterSettings, DEFAULT_SPAINTER_SETTINGS class SubstancePainterAddon(BaseServerAddon): - name = "substancepainter" - title = "Substance Painter" - version = __version__ settings_model: Type[SubstancePainterSettings] = SubstancePainterSettings async def get_default_settings(self): diff --git a/server_addon/substancepainter/server/settings/load_plugins.py b/server_addon/substancepainter/server/settings/load_plugins.py new file mode 100644 index 0000000000..e6b2fd86c3 --- /dev/null +++ b/server_addon/substancepainter/server/settings/load_plugins.py @@ -0,0 +1,122 @@ +from ayon_server.settings import BaseSettingsModel, SettingsField + + +def normal_map_format_enum(): + return [ + {"label": "DirectX", "value": "NormalMapFormat.DirectX"}, + {"label": "OpenGL", "value": "NormalMapFormat.OpenGL"}, + ] + + +def tangent_space_enum(): + return [ + {"label": "Per Fragment", "value": "TangentSpace.PerFragment"}, + {"label": "Per Vertex", "value": "TangentSpace.PerVertex"}, + ] + + +def uv_workflow_enum(): + return [ + {"label": "Default", "value": "ProjectWorkflow.Default"}, + {"label": "UV Tile", "value": "ProjectWorkflow.UVTile"}, + {"label": "Texture Set Per UV Tile", + "value": "ProjectWorkflow.TextureSetPerUVTile"} + ] + + +def document_resolution_enum(): + return [ + {"label": "128", "value": 128}, + {"label": "256", "value": 256}, + {"label": "512", "value": 512}, + {"label": "1024", "value": 1024}, + {"label": "2048", "value": 2048}, + {"label": "4096", "value": 4096} + ] + + +class ProjectTemplatesModel(BaseSettingsModel): + _layout = "expanded" + name: str = SettingsField("default", title="Template Name") + default_texture_resolution: int = SettingsField( + 1024, enum_resolver=document_resolution_enum, + title="Document Resolution", + description=("Set texture resolution when " + "creating new project.") + ) + import_cameras: bool = SettingsField( + True, title="Import Cameras", + description="Import cameras from the mesh file.") + normal_map_format: str = SettingsField( + "DirectX", enum_resolver=normal_map_format_enum, + title="Normal Map Format", + description=("Set normal map format when " + "creating new project.") + ) + project_workflow: str = SettingsField( + "Default", enum_resolver=uv_workflow_enum, + title="UV Tile Settings", + description=("Set UV workflow when " + "creating new project.") + ) + tangent_space_mode: str = SettingsField( + "PerFragment", enum_resolver=tangent_space_enum, + title="Tangent Space", + description=("An option to compute tangent space " + "when creating new project.") + ) + preserve_strokes: bool = SettingsField( + True, title="Preserve Strokes", + description=("Preserve strokes positions on mesh.\n" + "(only relevant when loading into " + "existing project)") + ) + + +class ProjectTemplateSettingModel(BaseSettingsModel): + project_templates: list[ProjectTemplatesModel] = SettingsField( + default_factory=ProjectTemplatesModel, + title="Project Templates" + ) + + +class LoadersModel(BaseSettingsModel): + SubstanceLoadProjectMesh: ProjectTemplateSettingModel = SettingsField( + default_factory=ProjectTemplateSettingModel, + title="Load Mesh" + ) + + +DEFAULT_LOADER_SETTINGS = { + "SubstanceLoadProjectMesh": { + "project_templates": [ + { + "name": "2K(Default)", + "default_texture_resolution": 2048, + "import_cameras": True, + "normal_map_format": "NormalMapFormat.DirectX", + "project_workflow": "ProjectWorkflow.Default", + "tangent_space_mode": "TangentSpace.PerFragment", + 
"preserve_strokes": True + }, + { + "name": "2K(UV tile)", + "default_texture_resolution": 2048, + "import_cameras": True, + "normal_map_format": "NormalMapFormat.DirectX", + "project_workflow": "ProjectWorkflow.UVTile", + "tangent_space_mode": "TangentSpace.PerFragment", + "preserve_strokes": True + }, + { + "name": "4K(Custom)", + "default_texture_resolution": 4096, + "import_cameras": True, + "normal_map_format": "NormalMapFormat.OpenGL", + "project_workflow": "ProjectWorkflow.UVTile", + "tangent_space_mode": "TangentSpace.PerFragment", + "preserve_strokes": True + } + ] + } +} diff --git a/server_addon/substancepainter/server/settings/main.py b/server_addon/substancepainter/server/settings/main.py index f80fa9fe1e..93523fd650 100644 --- a/server_addon/substancepainter/server/settings/main.py +++ b/server_addon/substancepainter/server/settings/main.py @@ -1,5 +1,6 @@ from ayon_server.settings import BaseSettingsModel, SettingsField from .imageio import ImageIOSettings, DEFAULT_IMAGEIO_SETTINGS +from .load_plugins import LoadersModel, DEFAULT_LOADER_SETTINGS class ShelvesSettingsModel(BaseSettingsModel): @@ -17,9 +18,12 @@ class SubstancePainterSettings(BaseSettingsModel): default_factory=list, title="Shelves" ) + load: LoadersModel = SettingsField( + default_factory=DEFAULT_LOADER_SETTINGS, title="Loaders") DEFAULT_SPAINTER_SETTINGS = { "imageio": DEFAULT_IMAGEIO_SETTINGS, - "shelves": [] + "shelves": [], + "load": DEFAULT_LOADER_SETTINGS, } diff --git a/server_addon/substancepainter/server/version.py b/server_addon/substancepainter/server/version.py deleted file mode 100644 index 3dc1f76bc6..0000000000 --- a/server_addon/substancepainter/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.0" diff --git a/server_addon/timers_manager/package.py b/server_addon/timers_manager/package.py new file mode 100644 index 0000000000..bd6b81b4b7 --- /dev/null +++ b/server_addon/timers_manager/package.py @@ -0,0 +1,3 @@ +name = "timers_manager" +title = "Timers Manager" +version = "0.1.1" diff --git a/server_addon/timers_manager/server/__init__.py b/server_addon/timers_manager/server/__init__.py index 29f9d47370..32e83d295c 100644 --- a/server_addon/timers_manager/server/__init__.py +++ b/server_addon/timers_manager/server/__init__.py @@ -2,12 +2,8 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import TimersManagerSettings class TimersManagerAddon(BaseServerAddon): - name = "timers_manager" - version = __version__ - title = "Timers Manager" settings_model: Type[TimersManagerSettings] = TimersManagerSettings diff --git a/server_addon/timers_manager/server/version.py b/server_addon/timers_manager/server/version.py deleted file mode 100644 index 485f44ac21..0000000000 --- a/server_addon/timers_manager/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.1" diff --git a/server_addon/traypublisher/package.py b/server_addon/traypublisher/package.py new file mode 100644 index 0000000000..4ca8ae9fd3 --- /dev/null +++ b/server_addon/traypublisher/package.py @@ -0,0 +1,3 @@ +name = "traypublisher" +title = "TrayPublisher" +version = "0.1.4" diff --git a/server_addon/traypublisher/server/__init__.py b/server_addon/traypublisher/server/__init__.py index e6f079609f..830f325ac0 100644 --- a/server_addon/traypublisher/server/__init__.py +++ b/server_addon/traypublisher/server/__init__.py @@ -1,14 +1,9 @@ from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import 
TraypublisherSettings, DEFAULT_TRAYPUBLISHER_SETTING class Traypublisher(BaseServerAddon): - name = "traypublisher" - title = "TrayPublisher" - version = __version__ - settings_model = TraypublisherSettings async def get_default_settings(self): diff --git a/server_addon/traypublisher/server/settings/creator_plugins.py b/server_addon/traypublisher/server/settings/creator_plugins.py index bf66d9a088..1ff14002aa 100644 --- a/server_addon/traypublisher/server/settings/creator_plugins.py +++ b/server_addon/traypublisher/server/settings/creator_plugins.py @@ -1,4 +1,7 @@ +from pydantic import validator from ayon_server.settings import BaseSettingsModel, SettingsField +from ayon_server.settings.validators import ensure_unique_names +from ayon_server.exceptions import BadRequestException class BatchMovieCreatorPlugin(BaseSettingsModel): @@ -22,11 +25,139 @@ class BatchMovieCreatorPlugin(BaseSettingsModel): ) +class ColumnItemModel(BaseSettingsModel): + """Allows to publish multiple video files in one go.
Name of matching + asset is parsed from file names ('asset.mov', 'asset_v001.mov', + 'my_asset_to_publish.mov')""" + + name: str = SettingsField( + title="Name", + default="" + ) + + type: str = SettingsField( + title="Type", + default="" + ) + + default: str = SettingsField( + title="Default", + default="" + ) + + required_column: bool = SettingsField( + title="Required Column", + default=False + ) + + validation_pattern: str = SettingsField( + title="Validation Regex Pattern", + default="^(.*)$" + ) + + +class ColumnConfigModel(BaseSettingsModel): + """Allows to publish multiple video files in one go.
+class ColumnConfigModel(BaseSettingsModel):
+    """Definition of the CSV delimiter and the expected CSV columns."""
+
+    csv_delimiter: str = SettingsField(
+        title="CSV delimiter",
+        default=","
+    )
+
+    columns: list[ColumnItemModel] = SettingsField(
+        title="Columns",
+        default_factory=list
+    )
+
+    @validator("columns")
+    def validate_unique_columns(cls, value):
+        ensure_unique_names(value)
+        return value
+
+
+class RepresentationItemModel(BaseSettingsModel):
+    """Representation name and the file extensions that map to it."""
+
+    name: str = SettingsField(
+        title="Name",
+        default=""
+    )
+
+    extensions: list[str] = SettingsField(
+        title="Extensions",
+        default_factory=list
+    )
+
+    @validator("extensions")
+    def validate_extension(cls, value):
+        for ext in value:
+            if not ext.startswith("."):
+                raise BadRequestException(f"Extension must start with '.': {ext}")
+        return value
+
+
+class RepresentationConfigModel(BaseSettingsModel):
+    """Configuration of representations and their tags for CSV ingest."""
+
+    tags_delimiter: str = SettingsField(
+        title="Tags delimiter",
+        default=";"
+    )
+
+    default_tags: list[str] = SettingsField(
+        title="Default tags",
+        default_factory=list
+    )
+
+    representations: list[RepresentationItemModel] = SettingsField(
+        title="Representations",
+        default_factory=list
+    )
+
+    @validator("representations")
+    def validate_unique_representations(cls, value):
+        ensure_unique_names(value)
+        return value
+
+
+class IngestCSVPluginModel(BaseSettingsModel):
+    """Allows to publish multiple video files in one go.
Name of matching + asset is parsed from file names ('asset.mov', 'asset_v001.mov', + 'my_asset_to_publish.mov')""" + + enabled: bool = SettingsField( + title="Enabled", + default=False + ) + + columns_config: ColumnConfigModel = SettingsField( + title="Columns config", + default_factory=ColumnConfigModel + ) + + representations_config: RepresentationConfigModel = SettingsField( + title="Representations config", + default_factory=RepresentationConfigModel + ) + + class TrayPublisherCreatePluginsModel(BaseSettingsModel): BatchMovieCreator: BatchMovieCreatorPlugin = SettingsField( title="Batch Movie Creator", default_factory=BatchMovieCreatorPlugin ) + IngestCSV: IngestCSVPluginModel = SettingsField( + title="Ingest CSV", + default_factory=IngestCSVPluginModel + ) DEFAULT_CREATORS = { @@ -41,4 +172,170 @@ DEFAULT_CREATORS = { ".mov" ] }, + "IngestCSV": { + "enabled": True, + "columns_config": { + "csv_delimiter": ",", + "columns": [ + { + "name": "File Path", + "type": "text", + "default": "", + "required_column": True, + "validation_pattern": "^([a-z0-9#._\\/]*)$" + }, + { + "name": "Folder Path", + "type": "text", + "default": "", + "required_column": True, + "validation_pattern": "^([a-zA-Z0-9_\\/]*)$" + }, + { + "name": "Task Name", + "type": "text", + "default": "", + "required_column": True, + "validation_pattern": "^(.*)$" + }, + { + "name": "Product Type", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + }, + { + "name": "Variant", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + }, + { + "name": "Version", + "type": "number", + "default": 1, + "required_column": True, + "validation_pattern": "^(\\d{1,3})$" + }, + { + "name": "Version Comment", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + }, + { + "name": "Version Thumbnail", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^([a-zA-Z0-9#._\\/]*)$" + }, + { + "name": "Frame Start", + "type": "number", + "default": 0, + "required_column": True, + "validation_pattern": "^(\\d{1,8})$" + }, + { + "name": "Frame End", + "type": "number", + "default": 0, + "required_column": True, + "validation_pattern": "^(\\d{1,8})$" + }, + { + "name": "Handle Start", + "type": "number", + "default": 0, + "required_column": True, + "validation_pattern": "^(\\d)$" + }, + { + "name": "Handle End", + "type": "number", + "default": 0, + "required_column": True, + "validation_pattern": "^(\\d)$" + }, + { + "name": "FPS", + "type": "decimal", + "default": 0.0, + "required_column": True, + "validation_pattern": "^[0-9]*\\.[0-9]+$|^[0-9]+$" + }, + { + "name": "Slate Exists", + "type": "bool", + "default": True, + "required_column": False, + "validation_pattern": "(True|False)" + }, + { + "name": "Representation", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + }, + { + "name": "Representation Colorspace", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + }, + { + "name": "Representation Tags", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + } + ] + }, + "representations_config": { + "tags_delimiter": ";", + "default_tags": [ + "review" + ], + "representations": [ + { + "name": "preview", + "extensions": [ + ".mp4", + ".mov" + ] + }, + { + "name": "exr", + "extensions": [ + ".exr" + ] + }, + { + "name": "edit", + "extensions": [ + 
".mov" + ] + }, + { + "name": "review", + "extensions": [ + ".mov" + ] + }, + { + "name": "nuke", + "extensions": [ + ".nk" + ] + } + ] + } + } } diff --git a/server_addon/traypublisher/server/version.py b/server_addon/traypublisher/server/version.py deleted file mode 100644 index de699158fd..0000000000 --- a/server_addon/traypublisher/server/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring addon version.""" -__version__ = "0.1.4" diff --git a/server_addon/tvpaint/package.py b/server_addon/tvpaint/package.py new file mode 100644 index 0000000000..2be3164f4a --- /dev/null +++ b/server_addon/tvpaint/package.py @@ -0,0 +1,3 @@ +name = "tvpaint" +title = "TVPaint" +version = "0.1.2" diff --git a/server_addon/tvpaint/server/__init__.py b/server_addon/tvpaint/server/__init__.py index 033d7d3792..658dcf0bb6 100644 --- a/server_addon/tvpaint/server/__init__.py +++ b/server_addon/tvpaint/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import TvpaintSettings, DEFAULT_VALUES class TvpaintAddon(BaseServerAddon): - name = "tvpaint" - title = "TVPaint" - version = __version__ settings_model: Type[TvpaintSettings] = TvpaintSettings async def get_default_settings(self): diff --git a/server_addon/tvpaint/server/settings/publish_plugins.py b/server_addon/tvpaint/server/settings/publish_plugins.py index 0d978e5714..db1c7bd11a 100644 --- a/server_addon/tvpaint/server/settings/publish_plugins.py +++ b/server_addon/tvpaint/server/settings/publish_plugins.py @@ -1,5 +1,5 @@ from ayon_server.settings import BaseSettingsModel, SettingsField -from ayon_server.types import ColorRGBA_uint8, ColorRGB_uint8 +from ayon_server.types import ColorRGBA_uint8 class CollectRenderInstancesModel(BaseSettingsModel): diff --git a/server_addon/tvpaint/server/version.py b/server_addon/tvpaint/server/version.py deleted file mode 100644 index b3f4756216..0000000000 --- a/server_addon/tvpaint/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.2" diff --git a/server_addon/unreal/package.py b/server_addon/unreal/package.py new file mode 100644 index 0000000000..cab89ca873 --- /dev/null +++ b/server_addon/unreal/package.py @@ -0,0 +1,3 @@ +name = "unreal" +title = "Unreal" +version = "0.1.0" diff --git a/server_addon/unreal/server/__init__.py b/server_addon/unreal/server/__init__.py index a5f3e9597d..751560b623 100644 --- a/server_addon/unreal/server/__init__.py +++ b/server_addon/unreal/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import UnrealSettings, DEFAULT_VALUES class UnrealAddon(BaseServerAddon): - name = "unreal" - title = "Unreal" - version = __version__ settings_model: Type[UnrealSettings] = UnrealSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/unreal/server/version.py b/server_addon/unreal/server/version.py deleted file mode 100644 index 3dc1f76bc6..0000000000 --- a/server_addon/unreal/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.0"