From 58693b4fa5c3f118312f9ecd81780ab33f38422f Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 3 Jul 2024 12:45:30 +0200 Subject: [PATCH] removed blender addon --- .../blender/client/ayon_blender/__init__.py | 13 - .../blender/client/ayon_blender/addon.py | 71 --- .../client/ayon_blender/api/__init__.py | 72 --- .../blender/client/ayon_blender/api/action.py | 47 -- .../client/ayon_blender/api/capture.py | 282 --------- .../client/ayon_blender/api/colorspace.py | 51 -- .../ayon_blender/api/icons/pyblish-32x32.png | Bin 632 -> 0 bytes .../blender/client/ayon_blender/api/lib.py | 426 ------------- .../blender/client/ayon_blender/api/ops.py | 456 -------------- .../client/ayon_blender/api/pipeline.py | 574 ------------------ .../blender/client/ayon_blender/api/plugin.py | 542 ----------------- .../client/ayon_blender/api/render_lib.py | 364 ----------- .../blender/client/ayon_blender/api/workio.py | 89 --- .../blender_addon/startup/init.py | 10 - .../hooks/pre_add_run_python_script_arg.py | 54 -- .../ayon_blender/hooks/pre_pyside_install.py | 295 --------- .../ayon_blender/hooks/pre_windows_console.py | 29 - .../plugins/create/convert_legacy.py | 78 --- .../plugins/create/create_action.py | 41 -- .../plugins/create/create_animation.py | 32 - .../plugins/create/create_blendScene.py | 34 -- .../plugins/create/create_camera.py | 42 -- .../plugins/create/create_layout.py | 32 - .../plugins/create/create_model.py | 31 - .../plugins/create/create_pointcache.py | 29 - .../plugins/create/create_render.py | 45 -- .../plugins/create/create_review.py | 27 - .../ayon_blender/plugins/create/create_rig.py | 31 - .../ayon_blender/plugins/create/create_usd.py | 30 - .../plugins/create/create_workfile.py | 132 ---- .../plugins/load/import_workfile.py | 84 --- .../ayon_blender/plugins/load/load_action.py | 293 --------- .../plugins/load/load_animation.py | 70 --- .../ayon_blender/plugins/load/load_audio.py | 227 ------- .../ayon_blender/plugins/load/load_blend.py | 286 --------- .../plugins/load/load_blendscene.py | 235 ------- .../ayon_blender/plugins/load/load_cache.py | 284 --------- .../plugins/load/load_camera_abc.py | 238 -------- .../plugins/load/load_camera_fbx.py | 224 ------- .../ayon_blender/plugins/load/load_fbx.py | 279 --------- .../plugins/load/load_layout_json.py | 297 --------- .../ayon_blender/plugins/load/load_look.py | 223 ------- .../plugins/publish/collect_current_file.py | 15 - .../publish/collect_file_dependencies.py | 36 -- .../plugins/publish/collect_instance.py | 44 -- .../plugins/publish/collect_render.py | 120 ---- .../plugins/publish/collect_review.py | 68 --- .../plugins/publish/collect_workfile.py | 38 -- .../plugins/publish/extract_abc.py | 94 --- .../plugins/publish/extract_abc_animation.py | 80 --- .../plugins/publish/extract_blend.py | 76 --- .../publish/extract_blend_animation.py | 67 -- .../plugins/publish/extract_camera_abc.py | 70 --- .../plugins/publish/extract_camera_fbx.py | 85 --- .../plugins/publish/extract_fbx.py | 93 --- .../plugins/publish/extract_fbx_animation.py | 227 ------- .../plugins/publish/extract_layout.py | 279 --------- .../plugins/publish/extract_playblast.py | 129 ---- .../plugins/publish/extract_thumbnail.py | 107 ---- .../plugins/publish/extract_usd.py | 90 --- .../publish/increment_workfile_version.py | 33 - .../plugins/publish/integrate_animation.py | 54 -- .../publish/validate_camera_zero_keyframe.py | 57 -- .../publish/validate_deadline_publish.py | 61 -- 
.../plugins/publish/validate_file_saved.py | 66 -- .../publish/validate_instance_empty.py | 20 - .../plugins/publish/validate_mesh_has_uv.py | 65 -- .../validate_mesh_no_negative_scale.py | 44 -- .../plugins/publish/validate_model_uv_map1.py | 93 --- .../publish/validate_no_colons_in_name.py | 53 -- .../plugins/publish/validate_object_mode.py | 44 -- .../publish/validate_render_camera_is_set.py | 29 - .../publish/validate_transform_zero.py | 94 --- .../blender/client/ayon_blender/version.py | 3 - server_addon/blender/package.py | 11 - server_addon/blender/server/__init__.py | 13 - .../blender/server/settings/__init__.py | 10 - .../blender/server/settings/imageio.py | 63 -- server_addon/blender/server/settings/main.py | 70 --- .../server/settings/publish_plugins.py | 361 ----------- .../server/settings/render_settings.py | 158 ----- 81 files changed, 9819 deletions(-) delete mode 100644 server_addon/blender/client/ayon_blender/__init__.py delete mode 100644 server_addon/blender/client/ayon_blender/addon.py delete mode 100644 server_addon/blender/client/ayon_blender/api/__init__.py delete mode 100644 server_addon/blender/client/ayon_blender/api/action.py delete mode 100644 server_addon/blender/client/ayon_blender/api/capture.py delete mode 100644 server_addon/blender/client/ayon_blender/api/colorspace.py delete mode 100644 server_addon/blender/client/ayon_blender/api/icons/pyblish-32x32.png delete mode 100644 server_addon/blender/client/ayon_blender/api/lib.py delete mode 100644 server_addon/blender/client/ayon_blender/api/ops.py delete mode 100644 server_addon/blender/client/ayon_blender/api/pipeline.py delete mode 100644 server_addon/blender/client/ayon_blender/api/plugin.py delete mode 100644 server_addon/blender/client/ayon_blender/api/render_lib.py delete mode 100644 server_addon/blender/client/ayon_blender/api/workio.py delete mode 100644 server_addon/blender/client/ayon_blender/blender_addon/startup/init.py delete mode 100644 server_addon/blender/client/ayon_blender/hooks/pre_add_run_python_script_arg.py delete mode 100644 server_addon/blender/client/ayon_blender/hooks/pre_pyside_install.py delete mode 100644 server_addon/blender/client/ayon_blender/hooks/pre_windows_console.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/create/convert_legacy.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/create/create_action.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/create/create_animation.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/create/create_blendScene.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/create/create_camera.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/create/create_layout.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/create/create_model.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/create/create_pointcache.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/create/create_render.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/create/create_review.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/create/create_rig.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/create/create_usd.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/create/create_workfile.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/load/import_workfile.py delete mode 100644 
server_addon/blender/client/ayon_blender/plugins/load/load_action.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/load/load_animation.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/load/load_audio.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/load/load_blend.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/load/load_blendscene.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/load/load_cache.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/load/load_camera_abc.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/load/load_camera_fbx.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/load/load_fbx.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/load/load_layout_json.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/load/load_look.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/collect_current_file.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/collect_file_dependencies.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/collect_instance.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/collect_render.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/collect_review.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/collect_workfile.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/extract_abc.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/extract_abc_animation.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/extract_blend.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/extract_blend_animation.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/extract_camera_abc.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/extract_camera_fbx.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/extract_fbx.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/extract_fbx_animation.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/extract_layout.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/extract_playblast.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/extract_thumbnail.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/extract_usd.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/increment_workfile_version.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/integrate_animation.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/validate_camera_zero_keyframe.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/validate_deadline_publish.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/validate_file_saved.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/validate_instance_empty.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/validate_mesh_has_uv.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/validate_mesh_no_negative_scale.py delete mode 100644 
server_addon/blender/client/ayon_blender/plugins/publish/validate_model_uv_map1.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/validate_no_colons_in_name.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/validate_object_mode.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/validate_render_camera_is_set.py delete mode 100644 server_addon/blender/client/ayon_blender/plugins/publish/validate_transform_zero.py delete mode 100644 server_addon/blender/client/ayon_blender/version.py delete mode 100644 server_addon/blender/package.py delete mode 100644 server_addon/blender/server/__init__.py delete mode 100644 server_addon/blender/server/settings/__init__.py delete mode 100644 server_addon/blender/server/settings/imageio.py delete mode 100644 server_addon/blender/server/settings/main.py delete mode 100644 server_addon/blender/server/settings/publish_plugins.py delete mode 100644 server_addon/blender/server/settings/render_settings.py diff --git a/server_addon/blender/client/ayon_blender/__init__.py b/server_addon/blender/client/ayon_blender/__init__.py deleted file mode 100644 index 221dcd4138..0000000000 --- a/server_addon/blender/client/ayon_blender/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -from .version import __version__ -from .addon import ( - BlenderAddon, - BLENDER_ADDON_ROOT, -) - - -__all__ = ( - "__version__", - - "BlenderAddon", - "BLENDER_ADDON_ROOT", -) diff --git a/server_addon/blender/client/ayon_blender/addon.py b/server_addon/blender/client/ayon_blender/addon.py deleted file mode 100644 index 9711580369..0000000000 --- a/server_addon/blender/client/ayon_blender/addon.py +++ /dev/null @@ -1,71 +0,0 @@ -import os -from ayon_core.addon import AYONAddon, IHostAddon - -from .version import __version__ - -BLENDER_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__)) - - -class BlenderAddon(AYONAddon, IHostAddon): - name = "blender" - version = __version__ - host_name = "blender" - - def add_implementation_envs(self, env, _app): - """Modify environments to contain all required for implementation.""" - # Prepare path to implementation script - implementation_user_script_path = os.path.join( - BLENDER_ADDON_ROOT, - "blender_addon" - ) - - # Add blender implementation script path to PYTHONPATH - python_path = env.get("PYTHONPATH") or "" - python_path_parts = [ - path - for path in python_path.split(os.pathsep) - if path - ] - python_path_parts.insert(0, implementation_user_script_path) - env["PYTHONPATH"] = os.pathsep.join(python_path_parts) - - # Modify Blender user scripts path - previous_user_scripts = set() - # Implementation path is added to set for easier paths check inside - # loops - will be removed at the end - previous_user_scripts.add(implementation_user_script_path) - - ayon_blender_user_scripts = ( - env.get("AYON_BLENDER_USER_SCRIPTS") or "" - ) - for path in ayon_blender_user_scripts.split(os.pathsep): - if path: - previous_user_scripts.add(os.path.normpath(path)) - - blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or "" - for path in blender_user_scripts.split(os.pathsep): - if path: - previous_user_scripts.add(os.path.normpath(path)) - - # Remove implementation path from user script paths as is set to - # `BLENDER_USER_SCRIPTS` - previous_user_scripts.remove(implementation_user_script_path) - env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path - - # Set custom user scripts env - env["AYON_BLENDER_USER_SCRIPTS"] = os.pathsep.join( - previous_user_scripts - ) - - # 
Define Qt binding if not defined - env.pop("QT_PREFERRED_BINDING", None) - - def get_launch_hook_paths(self, app): - if app.host_name != self.host_name: - return [] - return [ - os.path.join(BLENDER_ADDON_ROOT, "hooks") - ] - - def get_workfile_extensions(self): - return [".blend"] diff --git a/server_addon/blender/client/ayon_blender/api/__init__.py b/server_addon/blender/client/ayon_blender/api/__init__.py deleted file mode 100644 index da2a6fbbbb..0000000000 --- a/server_addon/blender/client/ayon_blender/api/__init__.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Public API - -Anything that isn't defined here is INTERNAL and unreliable for external use. - -""" - -from .pipeline import ( - install, - uninstall, - ls, - publish, - containerise, - BlenderHost, -) - -from .plugin import ( - Creator, -) - -from .workio import ( - open_file, - save_file, - current_file, - has_unsaved_changes, - file_extensions, - work_root, -) - -from .lib import ( - lsattr, - lsattrs, - read, - maintained_selection, - maintained_time, - get_selection, - # unique_name, -) - -from .capture import capture - -from .render_lib import prepare_rendering - - -__all__ = [ - "install", - "uninstall", - "ls", - "publish", - "containerise", - "BlenderHost", - - "Creator", - - # Workfiles API - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root", - - # Utility functions - "maintained_selection", - "maintained_time", - "lsattr", - "lsattrs", - "read", - "get_selection", - "capture", - # "unique_name", - "prepare_rendering", -] diff --git a/server_addon/blender/client/ayon_blender/api/action.py b/server_addon/blender/client/ayon_blender/api/action.py deleted file mode 100644 index 865c2443e0..0000000000 --- a/server_addon/blender/client/ayon_blender/api/action.py +++ /dev/null @@ -1,47 +0,0 @@ -import bpy - -import pyblish.api - -from ayon_core.pipeline.publish import get_errored_instances_from_context - - -class SelectInvalidAction(pyblish.api.Action): - """Select invalid objects in Blender when a publish plug-in failed.""" - label = "Select Invalid" - on = "failed" - icon = "search" - - def process(self, context, plugin): - errored_instances = get_errored_instances_from_context(context, - plugin=plugin) - - # Get the invalid nodes for the plug-ins - self.log.info("Finding invalid nodes...") - invalid = list() - for instance in errored_instances: - invalid_nodes = plugin.get_invalid(instance) - if invalid_nodes: - if isinstance(invalid_nodes, (list, tuple)): - invalid.extend(invalid_nodes) - else: - self.log.warning( - "Failed plug-in doesn't have any selectable objects." - ) - - bpy.ops.object.select_all(action='DESELECT') - - # Make sure every node is only processed once - invalid = list(set(invalid)) - if not invalid: - self.log.info("No invalid nodes found.") - return - - invalid_names = [obj.name for obj in invalid] - self.log.info( - "Selecting invalid objects: %s", ", ".join(invalid_names) - ) - # Select the objects and also make the last one the active object. 
- for obj in invalid: - obj.select_set(True) - - bpy.context.view_layer.objects.active = invalid[-1] diff --git a/server_addon/blender/client/ayon_blender/api/capture.py b/server_addon/blender/client/ayon_blender/api/capture.py deleted file mode 100644 index e5e6041563..0000000000 --- a/server_addon/blender/client/ayon_blender/api/capture.py +++ /dev/null @@ -1,282 +0,0 @@ - -"""Blender Capture -Playblasting with independent viewport, camera and display options -""" -import contextlib -import bpy - -from .lib import maintained_time -from .plugin import deselect_all, create_blender_context - - -def capture( - camera=None, - width=None, - height=None, - filename=None, - start_frame=None, - end_frame=None, - step_frame=None, - sound=None, - isolate=None, - maintain_aspect_ratio=True, - overwrite=False, - image_settings=None, - display_options=None -): - """Playblast in an independent windows - Arguments: - camera (str, optional): Name of camera, defaults to "Camera" - width (int, optional): Width of output in pixels - height (int, optional): Height of output in pixels - filename (str, optional): Name of output file path. Defaults to current - render output path. - start_frame (int, optional): Defaults to current start frame. - end_frame (int, optional): Defaults to current end frame. - step_frame (int, optional): Defaults to 1. - sound (str, optional): Specify the sound node to be used during - playblast. When None (default) no sound will be used. - isolate (list): List of nodes to isolate upon capturing - maintain_aspect_ratio (bool, optional): Modify height in order to - maintain aspect ratio. - overwrite (bool, optional): Whether or not to overwrite if file - already exists. If disabled and file exists and error will be - raised. - image_settings (dict, optional): Supplied image settings for render, - using `ImageSettings` - display_options (dict, optional): Supplied display options for render - """ - - scene = bpy.context.scene - camera = camera or "Camera" - - # Ensure camera exists. - if camera not in scene.objects and camera != "AUTO": - raise RuntimeError("Camera does not exist: {0}".format(camera)) - - # Ensure resolution. - if width and height: - maintain_aspect_ratio = False - width = width or scene.render.resolution_x - height = height or scene.render.resolution_y - if maintain_aspect_ratio: - ratio = scene.render.resolution_x / scene.render.resolution_y - height = round(width / ratio) - - # Get frame range. 
- if start_frame is None: - start_frame = scene.frame_start - if end_frame is None: - end_frame = scene.frame_end - if step_frame is None: - step_frame = 1 - frame_range = (start_frame, end_frame, step_frame) - - if filename is None: - filename = scene.render.filepath - - render_options = { - "filepath": "{}.".format(filename.rstrip(".")), - "resolution_x": width, - "resolution_y": height, - "use_overwrite": overwrite, - } - - with _independent_window() as window: - - applied_view(window, camera, isolate, options=display_options) - - with contextlib.ExitStack() as stack: - stack.enter_context(maintain_camera(window, camera)) - stack.enter_context(applied_frame_range(window, *frame_range)) - stack.enter_context(applied_render_options(window, render_options)) - stack.enter_context(applied_image_settings(window, image_settings)) - stack.enter_context(maintained_time()) - - bpy.ops.render.opengl( - animation=True, - render_keyed_only=False, - sequencer=False, - write_still=False, - view_context=True - ) - - return filename - - -ImageSettings = { - "file_format": "FFMPEG", - "color_mode": "RGB", - "ffmpeg": { - "format": "QUICKTIME", - "use_autosplit": False, - "codec": "H264", - "constant_rate_factor": "MEDIUM", - "gopsize": 18, - "use_max_b_frames": False, - }, -} - - -def isolate_objects(window, objects): - """Isolate selection""" - deselect_all() - - for obj in objects: - obj.select_set(True) - - context = create_blender_context(selected=objects, window=window) - - with bpy.context.temp_override(**context): - bpy.ops.view3d.view_axis(type="FRONT") - bpy.ops.view3d.localview() - - deselect_all() - - -def _apply_options(entity, options): - for option, value in options.items(): - if isinstance(value, dict): - _apply_options(getattr(entity, option), value) - else: - setattr(entity, option, value) - - -def applied_view(window, camera, isolate=None, options=None): - """Apply view options to window.""" - area = window.screen.areas[0] - space = area.spaces[0] - - area.ui_type = "VIEW_3D" - - types = {"MESH", "GPENCIL"} - objects = [obj for obj in window.scene.objects if obj.type in types] - - if camera == "AUTO": - space.region_3d.view_perspective = "ORTHO" - isolate_objects(window, isolate or objects) - else: - isolate_objects(window, isolate or objects) - space.camera = window.scene.objects.get(camera) - space.region_3d.view_perspective = "CAMERA" - - if isinstance(options, dict): - _apply_options(space, options) - else: - space.shading.type = "SOLID" - space.shading.color_type = "MATERIAL" - space.show_gizmo = False - space.overlay.show_overlays = False - - -@contextlib.contextmanager -def applied_frame_range(window, start, end, step): - """Context manager for setting frame range.""" - # Store current frame range - current_frame_start = window.scene.frame_start - current_frame_end = window.scene.frame_end - current_frame_step = window.scene.frame_step - # Apply frame range - window.scene.frame_start = start - window.scene.frame_end = end - window.scene.frame_step = step - try: - yield - finally: - # Restore frame range - window.scene.frame_start = current_frame_start - window.scene.frame_end = current_frame_end - window.scene.frame_step = current_frame_step - - -@contextlib.contextmanager -def applied_render_options(window, options): - """Context manager for setting render options.""" - render = window.scene.render - - # Store current settings - original = {} - for opt in options.copy(): - try: - original[opt] = getattr(render, opt) - except ValueError: - options.pop(opt) - - # Apply settings - 
_apply_options(render, options) - - try: - yield - finally: - # Restore previous settings - _apply_options(render, original) - - -@contextlib.contextmanager -def applied_image_settings(window, options): - """Context manager to override image settings.""" - - options = options or ImageSettings.copy() - ffmpeg = options.pop("ffmpeg", {}) - render = window.scene.render - - # Store current image settings - original = {} - for opt in options.copy(): - try: - original[opt] = getattr(render.image_settings, opt) - except ValueError: - options.pop(opt) - - # Store current ffmpeg settings - original_ffmpeg = {} - for opt in ffmpeg.copy(): - try: - original_ffmpeg[opt] = getattr(render.ffmpeg, opt) - except ValueError: - ffmpeg.pop(opt) - - # Apply image settings - for opt, value in options.items(): - setattr(render.image_settings, opt, value) - - # Apply ffmpeg settings - for opt, value in ffmpeg.items(): - setattr(render.ffmpeg, opt, value) - - try: - yield - finally: - # Restore previous settings - for opt, value in original.items(): - setattr(render.image_settings, opt, value) - for opt, value in original_ffmpeg.items(): - setattr(render.ffmpeg, opt, value) - - -@contextlib.contextmanager -def maintain_camera(window, camera): - """Context manager to override camera.""" - current_camera = window.scene.camera - if camera in window.scene.objects: - window.scene.camera = window.scene.objects.get(camera) - try: - yield - finally: - window.scene.camera = current_camera - - -@contextlib.contextmanager -def _independent_window(): - """Create capture-window context.""" - context = create_blender_context() - current_windows = set(bpy.context.window_manager.windows) - with bpy.context.temp_override(**context): - bpy.ops.wm.window_new() - window = list( - set(bpy.context.window_manager.windows) - current_windows)[0] - context["window"] = window - try: - yield window - finally: - bpy.ops.wm.window_close() diff --git a/server_addon/blender/client/ayon_blender/api/colorspace.py b/server_addon/blender/client/ayon_blender/api/colorspace.py deleted file mode 100644 index 4521612b7d..0000000000 --- a/server_addon/blender/client/ayon_blender/api/colorspace.py +++ /dev/null @@ -1,51 +0,0 @@ -import attr - -import bpy - - -@attr.s -class LayerMetadata(object): - """Data class for Render Layer metadata.""" - frameStart = attr.ib() - frameEnd = attr.ib() - - -@attr.s -class RenderProduct(object): - """ - Getting Colorspace as Specific Render Product Parameter for submitting - publish job. - """ - colorspace = attr.ib() # colorspace - view = attr.ib() # OCIO view transform - productName = attr.ib(default=None) - - -class ARenderProduct(object): - def __init__(self): - """Constructor.""" - # Initialize - self.layer_data = self._get_layer_data() - self.layer_data.products = self.get_render_products() - - def _get_layer_data(self): - scene = bpy.context.scene - - return LayerMetadata( - frameStart=int(scene.frame_start), - frameEnd=int(scene.frame_end), - ) - - def get_render_products(self): - """To be implemented by renderer class. - This should return a list of RenderProducts. 
- Returns: - list: List of RenderProduct - """ - return [ - RenderProduct( - colorspace="sRGB", - view="ACES 1.0", - productName="" - ) - ] diff --git a/server_addon/blender/client/ayon_blender/api/icons/pyblish-32x32.png b/server_addon/blender/client/ayon_blender/api/icons/pyblish-32x32.png deleted file mode 100644 index b34e397e0bd502eb336f994f014a518198d93599..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 632 zcmV-;0*C#HP)?m3OVFzoDHzW14L zXLc9IkqG*a3_I>wqC(4bTV70aHK&_zb)S-or zdR!e?Gz01oa2+_3G`|NP#ua@8z5!o>X<#;9R|2bmL0|;f63ZT7V-l+d919^l%3Ar^ zg#3!SwovNY$J`#X`Bzud{=Sy+^`w3nIH_(b$uI|`d*DIZ+4=~EfmcB5%AW!Efyc4_ z377pl-+RGsR;{ENE2 zlz9SiqM-S_dY}^X1mao&fb*4_M}R@~`4Y_Us>><|h!DGM9;+<*qLEA%q6-GNXH<>i|*pjP~hX0nBH#FZ2qakcE>` z0W5F17Z?vA{OT}XF{!tbqt{SR^~5*hEygi&)Nu5GKn2{MR3FJp2!~ Sv8(t10000>> import bpy - >>> def compute(): - ... return 6 - ... - >>> bpy.ops.mesh.primitive_cube_add() - >>> cube = bpy.context.view_layer.objects.active - >>> imprint(cube, { - ... "regularString": "myFamily", - ... "computedValue": lambda: compute() - ... }) - ... - >>> cube['avalon']['computedValue'] - 6 - """ - - imprint_data = dict() - - for key, value in data.items(): - if value is None: - continue - - if callable(value): - # Support values evaluated at imprint - value = value() - - if not isinstance(value, (int, float, bool, str, list, dict)): - raise TypeError(f"Unsupported type: {type(value)}") - - imprint_data[key] = value - - pipeline.metadata_update(node, imprint_data) - - -def lsattr(attr: str, - value: Union[str, int, bool, List, Dict, None] = None) -> List: - r"""Return nodes matching `attr` and `value` - - Arguments: - attr: Name of Blender property - value: Value of attribute. If none - is provided, return all nodes with this attribute. - - Example: - >>> lsattr("id", "myId") - ... [bpy.data.objects["myNode"] - >>> lsattr("id") - ... [bpy.data.objects["myNode"], bpy.data.objects["myOtherNode"]] - - Returns: - list - """ - - return lsattrs({attr: value}) - - -def lsattrs(attrs: Dict) -> List: - r"""Return nodes with the given attribute(s). - - Arguments: - attrs: Name and value pairs of expected matches - - Example: - >>> lsattrs({"age": 5}) # Return nodes with an `age` of 5 - # Return nodes with both `age` and `color` of 5 and blue - >>> lsattrs({"age": 5, "color": "blue"}) - - Returns a list. - - """ - - # For now return all objects, not filtered by scene/collection/view_layer. - matches = set() - for coll in dir(bpy.data): - if not isinstance( - getattr(bpy.data, coll), - bpy.types.bpy_prop_collection, - ): - continue - for node in getattr(bpy.data, coll): - for attr, value in attrs.items(): - avalon_prop = node.get(pipeline.AVALON_PROPERTY) - if not avalon_prop: - continue - if (avalon_prop.get(attr) - and (value is None or avalon_prop.get(attr) == value)): - matches.add(node) - return list(matches) - - -def read(node: bpy.types.bpy_struct_meta_idprop): - """Return user-defined attributes from `node`""" - - data = dict(node.get(pipeline.AVALON_PROPERTY, {})) - - # Ignore hidden/internal data - data = { - key: value - for key, value in data.items() if not key.startswith("_") - } - - return data - - -def get_selected_collections(): - """ - Returns a list of the currently selected collections in the outliner. - - Raises: - RuntimeError: If the outliner cannot be found in the main Blender - window. - - Returns: - list: A list of `bpy.types.Collection` objects that are currently - selected in the outliner. 
- """ - window = bpy.context.window or bpy.context.window_manager.windows[0] - - try: - area = next( - area for area in window.screen.areas - if area.type == 'OUTLINER') - region = next( - region for region in area.regions - if region.type == 'WINDOW') - except StopIteration as e: - raise RuntimeError("Could not find outliner. An outliner space " - "must be in the main Blender window.") from e - - with bpy.context.temp_override( - window=window, - area=area, - region=region, - screen=window.screen - ): - ids = bpy.context.selected_ids - - return [id for id in ids if isinstance(id, bpy.types.Collection)] - - -def get_selection(include_collections: bool = False) -> List[bpy.types.Object]: - """ - Returns a list of selected objects in the current Blender scene. - - Args: - include_collections (bool, optional): Whether to include selected - collections in the result. Defaults to False. - - Returns: - List[bpy.types.Object]: A list of selected objects. - """ - selection = [obj for obj in bpy.context.scene.objects if obj.select_get()] - - if include_collections: - selection.extend(get_selected_collections()) - - return selection - - -@contextlib.contextmanager -def maintained_selection(): - r"""Maintain selection during context - - Example: - >>> with maintained_selection(): - ... # Modify selection - ... bpy.ops.object.select_all(action='DESELECT') - >>> # Selection restored - """ - - previous_selection = get_selection() - previous_active = bpy.context.view_layer.objects.active - try: - yield - finally: - # Clear the selection - for node in get_selection(): - node.select_set(state=False) - if previous_selection: - for node in previous_selection: - try: - node.select_set(state=True) - except ReferenceError: - # This could happen if a selected node was deleted during - # the context. - log.exception("Failed to reselect") - continue - try: - bpy.context.view_layer.objects.active = previous_active - except ReferenceError: - # This could happen if the active node was deleted during the - # context. - log.exception("Failed to set active object.") - - -@contextlib.contextmanager -def maintained_time(): - """Maintain current frame during context.""" - current_time = bpy.context.scene.frame_current - try: - yield - finally: - bpy.context.scene.frame_current = current_time - - -def get_all_parents(obj): - """Get all recursive parents of object. - - Arguments: - obj (bpy.types.Object): Object to get all parents for. - - Returns: - List[bpy.types.Object]: All parents of object - - """ - result = [] - while True: - obj = obj.parent - if not obj: - break - result.append(obj) - return result - - -def get_highest_root(objects): - """Get the highest object (the least parents) among the objects. - - If multiple objects have the same amount of parents (or no parents) the - first object found in the input iterable will be returned. - - Note that this will *not* return objects outside of the input list, as - such it will not return the root of node from a child node. It is purely - intended to find the highest object among a list of objects. To instead - get the root from one object use, e.g. `get_all_parents(obj)[-1]` - - Arguments: - objects (List[bpy.types.Object]): Objects to find the highest root in. - - Returns: - Optional[bpy.types.Object]: First highest root found or None if no - `bpy.types.Object` found in input list. 
- - """ - included_objects = {obj.name_full for obj in objects} - num_parents_to_obj = {} - for obj in objects: - if isinstance(obj, bpy.types.Object): - parents = get_all_parents(obj) - # included parents - parents = [parent for parent in parents if - parent.name_full in included_objects] - if not parents: - # A node without parents must be a highest root - return obj - - num_parents_to_obj.setdefault(len(parents), obj) - - if not num_parents_to_obj: - return - - minimum_parent = min(num_parents_to_obj) - return num_parents_to_obj[minimum_parent] diff --git a/server_addon/blender/client/ayon_blender/api/ops.py b/server_addon/blender/client/ayon_blender/api/ops.py deleted file mode 100644 index 7cf9600067..0000000000 --- a/server_addon/blender/client/ayon_blender/api/ops.py +++ /dev/null @@ -1,456 +0,0 @@ -"""Blender operators and menus for use with Avalon.""" - -import os -import sys -import platform -import time -import traceback -import collections -from pathlib import Path -from types import ModuleType -from typing import Dict, List, Optional, Union - -from qtpy import QtWidgets, QtCore - -import bpy -import bpy.utils.previews - -from ayon_core import style -from ayon_core.pipeline import get_current_folder_path, get_current_task_name -from ayon_core.tools.utils import host_tools - -from .workio import OpenFileCacher -from . import pipeline - -PREVIEW_COLLECTIONS: Dict = dict() - -# This seems like a good value to keep the Qt app responsive and doesn't slow -# down Blender. At least on macOS I the interface of Blender gets very laggy if -# you make it smaller. -TIMER_INTERVAL: float = 0.01 if platform.system() == "Windows" else 0.1 - - -def execute_function_in_main_thread(f): - """Decorator to move a function call into main thread items""" - def wrapper(*args, **kwargs): - mti = MainThreadItem(f, *args, **kwargs) - execute_in_main_thread(mti) - return wrapper - - -class BlenderApplication(QtWidgets.QApplication): - _instance = None - blender_windows = {} - - def __init__(self, *args, **kwargs): - super(BlenderApplication, self).__init__(*args, **kwargs) - self.setQuitOnLastWindowClosed(False) - - self.setStyleSheet(style.load_stylesheet()) - self.lastWindowClosed.connect(self.__class__.reset) - - @classmethod - def get_app(cls): - if cls._instance is None: - cls._instance = cls(sys.argv) - return cls._instance - - @classmethod - def reset(cls): - cls._instance = None - - @classmethod - def store_window(cls, identifier, window): - current_window = cls.get_window(identifier) - cls.blender_windows[identifier] = window - if current_window: - current_window.close() - # current_window.deleteLater() - - @classmethod - def get_window(cls, identifier): - return cls.blender_windows.get(identifier) - - -class MainThreadItem: - """Structure to store information about callback in main thread. - - Item should be used to execute callback in main thread which may be needed - for execution of Qt objects. - - Item store callback (callable variable), arguments and keyword arguments - for the callback. Item hold information about it's process. - """ - not_set = object() - sleep_time = 0.1 - - def __init__(self, callback, *args, **kwargs): - self.done = False - self.exception = self.not_set - self.result = self.not_set - self.callback = callback - self.args = args - self.kwargs = kwargs - - def execute(self): - """Execute callback and store its result. - - Method must be called from main thread. Item is marked as `done` - when callback execution finished. 
Store output of callback of exception - information when callback raises one. - """ - print("Executing process in main thread") - if self.done: - print("- item is already processed") - return - - callback = self.callback - args = self.args - kwargs = self.kwargs - print("Running callback: {}".format(str(callback))) - try: - result = callback(*args, **kwargs) - self.result = result - - except Exception: - self.exception = sys.exc_info() - - finally: - print("Done") - self.done = True - - def wait(self): - """Wait for result from main thread. - - This method stops current thread until callback is executed. - - Returns: - object: Output of callback. May be any type or object. - - Raises: - Exception: Reraise any exception that happened during callback - execution. - """ - while not self.done: - print(self.done) - time.sleep(self.sleep_time) - - if self.exception is self.not_set: - return self.result - raise self.exception - - -class GlobalClass: - app = None - main_thread_callbacks = collections.deque() - is_windows = platform.system().lower() == "windows" - - -def execute_in_main_thread(main_thead_item): - print("execute_in_main_thread") - GlobalClass.main_thread_callbacks.append(main_thead_item) - - -def _process_app_events() -> Optional[float]: - """Process the events of the Qt app if the window is still visible. - - If the app has any top level windows and at least one of them is visible - return the time after which this function should be run again. Else return - None, so the function is not run again and will be unregistered. - """ - while GlobalClass.main_thread_callbacks: - main_thread_item = GlobalClass.main_thread_callbacks.popleft() - main_thread_item.execute() - if main_thread_item.exception is not MainThreadItem.not_set: - _clc, val, tb = main_thread_item.exception - msg = str(val) - detail = "\n".join(traceback.format_exception(_clc, val, tb)) - dialog = QtWidgets.QMessageBox( - QtWidgets.QMessageBox.Warning, - "Error", - msg) - dialog.setMinimumWidth(500) - dialog.setDetailedText(detail) - dialog.exec_() - - # Refresh Manager - if GlobalClass.app: - manager = GlobalClass.app.get_window("WM_OT_avalon_manager") - if manager: - manager.refresh() - - if not GlobalClass.is_windows: - if OpenFileCacher.opening_file: - return TIMER_INTERVAL - - app = GlobalClass.app - if app._instance: - app.processEvents() - return TIMER_INTERVAL - return TIMER_INTERVAL - - -class LaunchQtApp(bpy.types.Operator): - """A Base class for operators to launch a Qt app.""" - - _app: QtWidgets.QApplication - _window = Union[QtWidgets.QDialog, ModuleType] - _tool_name: str = None - _init_args: Optional[List] = list() - _init_kwargs: Optional[Dict] = dict() - bl_idname: str = None - - def __init__(self): - if self.bl_idname is None: - raise NotImplementedError("Attribute `bl_idname` must be set!") - print(f"Initialising {self.bl_idname}...") - self._app = BlenderApplication.get_app() - GlobalClass.app = self._app - - if not bpy.app.timers.is_registered(_process_app_events): - bpy.app.timers.register( - _process_app_events, - persistent=True - ) - - def execute(self, context): - """Execute the operator. - - The child class must implement `execute()` where it only has to set - `self._window` to the desired Qt window and then simply run - `return super().execute(context)`. - `self._window` is expected to have a `show` method. - If the `show` method requires arguments, you can set `self._show_args` - and `self._show_kwargs`. `args` should be a list, `kwargs` a - dictionary. 
- """ - - if self._tool_name is None: - if self._window is None: - raise AttributeError("`self._window` is not set.") - - else: - window = self._app.get_window(self.bl_idname) - if window is None: - window = host_tools.get_tool_by_name(self._tool_name) - self._app.store_window(self.bl_idname, window) - self._window = window - - if not isinstance(self._window, (QtWidgets.QWidget, ModuleType)): - raise AttributeError( - "`window` should be a `QWidget or module`. Got: {}".format( - str(type(window)) - ) - ) - - self.before_window_show() - - def pull_to_front(window): - """Pull window forward to screen. - - If Window is minimized this will un-minimize, then it can be raised - and activated to the front. - """ - window.setWindowState( - (window.windowState() & ~QtCore.Qt.WindowMinimized) | - QtCore.Qt.WindowActive - ) - window.raise_() - window.activateWindow() - - if isinstance(self._window, ModuleType): - self._window.show() - pull_to_front(self._window) - - # Pull window to the front - window = None - if hasattr(self._window, "window"): - window = self._window.window - elif hasattr(self._window, "_window"): - window = self._window.window - - if window: - self._app.store_window(self.bl_idname, window) - - else: - origin_flags = self._window.windowFlags() - on_top_flags = origin_flags | QtCore.Qt.WindowStaysOnTopHint - self._window.setWindowFlags(on_top_flags) - self._window.show() - pull_to_front(self._window) - - # if on_top_flags != origin_flags: - # self._window.setWindowFlags(origin_flags) - # self._window.show() - - return {'FINISHED'} - - def before_window_show(self): - return - - -class LaunchCreator(LaunchQtApp): - """Launch Avalon Creator.""" - - bl_idname = "wm.avalon_creator" - bl_label = "Create..." - _tool_name = "creator" - - def before_window_show(self): - self._window.refresh() - - def execute(self, context): - host_tools.show_publisher(tab="create") - return {"FINISHED"} - - -class LaunchLoader(LaunchQtApp): - """Launch AYON Loader.""" - - bl_idname = "wm.avalon_loader" - bl_label = "Load..." - _tool_name = "loader" - - -class LaunchPublisher(LaunchQtApp): - """Launch Avalon Publisher.""" - - bl_idname = "wm.avalon_publisher" - bl_label = "Publish..." - - def execute(self, context): - host_tools.show_publisher(tab="publish") - return {"FINISHED"} - - -class LaunchManager(LaunchQtApp): - """Launch Avalon Manager.""" - - bl_idname = "wm.avalon_manager" - bl_label = "Manage..." - _tool_name = "sceneinventory" - - -class LaunchLibrary(LaunchQtApp): - """Launch Library Loader.""" - - bl_idname = "wm.library_loader" - bl_label = "Library..." - _tool_name = "libraryloader" - - -class LaunchWorkFiles(LaunchQtApp): - """Launch Avalon Work Files.""" - - bl_idname = "wm.avalon_workfiles" - bl_label = "Work Files..." 
- _tool_name = "workfiles" - - def execute(self, context): - return super().execute(context) - - -class SetFrameRange(bpy.types.Operator): - bl_idname = "wm.ayon_set_frame_range" - bl_label = "Set Frame Range" - - def execute(self, context): - data = pipeline.get_folder_attributes() - pipeline.set_frame_range(data) - return {"FINISHED"} - - -class SetResolution(bpy.types.Operator): - bl_idname = "wm.ayon_set_resolution" - bl_label = "Set Resolution" - - def execute(self, context): - data = pipeline.get_folder_attributes() - pipeline.set_resolution(data) - return {"FINISHED"} - - -class TOPBAR_MT_avalon(bpy.types.Menu): - """Avalon menu.""" - - bl_idname = "TOPBAR_MT_avalon" - bl_label = os.environ.get("AYON_MENU_LABEL") - - def draw(self, context): - """Draw the menu in the UI.""" - - layout = self.layout - - pcoll = PREVIEW_COLLECTIONS.get("avalon") - if pcoll: - pyblish_menu_icon = pcoll["pyblish_menu_icon"] - pyblish_menu_icon_id = pyblish_menu_icon.icon_id - else: - pyblish_menu_icon_id = 0 - - folder_path = get_current_folder_path() - task_name = get_current_task_name() - context_label = f"{folder_path}, {task_name}" - context_label_item = layout.row() - context_label_item.operator( - LaunchWorkFiles.bl_idname, text=context_label - ) - context_label_item.enabled = False - layout.separator() - layout.operator(LaunchCreator.bl_idname, text="Create...") - layout.operator(LaunchLoader.bl_idname, text="Load...") - layout.operator( - LaunchPublisher.bl_idname, - text="Publish...", - icon_value=pyblish_menu_icon_id, - ) - layout.operator(LaunchManager.bl_idname, text="Manage...") - layout.operator(LaunchLibrary.bl_idname, text="Library...") - layout.separator() - layout.operator(SetFrameRange.bl_idname, text="Set Frame Range") - layout.operator(SetResolution.bl_idname, text="Set Resolution") - layout.separator() - layout.operator(LaunchWorkFiles.bl_idname, text="Work Files...") - - -def draw_avalon_menu(self, context): - """Draw the Avalon menu in the top bar.""" - - self.layout.menu(TOPBAR_MT_avalon.bl_idname) - - -classes = [ - LaunchCreator, - LaunchLoader, - LaunchPublisher, - LaunchManager, - LaunchLibrary, - LaunchWorkFiles, - SetFrameRange, - SetResolution, - TOPBAR_MT_avalon, -] - - -def register(): - "Register the operators and menu." 
- - pcoll = bpy.utils.previews.new() - pyblish_icon_file = Path(__file__).parent / "icons" / "pyblish-32x32.png" - pcoll.load("pyblish_menu_icon", str(pyblish_icon_file.absolute()), 'IMAGE') - PREVIEW_COLLECTIONS["avalon"] = pcoll - - BlenderApplication.get_app() - for cls in classes: - bpy.utils.register_class(cls) - bpy.types.TOPBAR_MT_editor_menus.append(draw_avalon_menu) - - -def unregister(): - """Unregister the operators and menu.""" - - pcoll = PREVIEW_COLLECTIONS.pop("avalon") - bpy.utils.previews.remove(pcoll) - bpy.types.TOPBAR_MT_editor_menus.remove(draw_avalon_menu) - for cls in reversed(classes): - bpy.utils.unregister_class(cls) diff --git a/server_addon/blender/client/ayon_blender/api/pipeline.py b/server_addon/blender/client/ayon_blender/api/pipeline.py deleted file mode 100644 index d2ff129a48..0000000000 --- a/server_addon/blender/client/ayon_blender/api/pipeline.py +++ /dev/null @@ -1,574 +0,0 @@ -import os -import sys -import traceback -from typing import Callable, Dict, Iterator, List, Optional - -import bpy - -import pyblish.api -import ayon_api - -from ayon_core.host import ( - HostBase, - IWorkfileHost, - IPublishHost, - ILoadHost -) -from ayon_core.pipeline import ( - schema, - get_current_project_name, - get_current_folder_path, - register_loader_plugin_path, - register_creator_plugin_path, - deregister_loader_plugin_path, - deregister_creator_plugin_path, - AVALON_CONTAINER_ID, - AYON_CONTAINER_ID, -) -from ayon_core.lib import ( - Logger, - register_event_callback, - emit_event -) -from ayon_core.settings import get_project_settings -from ayon_blender import BLENDER_ADDON_ROOT - -from . import lib -from . import ops - -from .workio import ( - open_file, - save_file, - current_file, - has_unsaved_changes, - file_extensions, - work_root, -) - -PLUGINS_DIR = os.path.join(BLENDER_ADDON_ROOT, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") -LOAD_PATH = os.path.join(PLUGINS_DIR, "load") -CREATE_PATH = os.path.join(PLUGINS_DIR, "create") - -ORIGINAL_EXCEPTHOOK = sys.excepthook - -AVALON_INSTANCES = "AVALON_INSTANCES" -AVALON_CONTAINERS = "AVALON_CONTAINERS" -AVALON_PROPERTY = 'avalon' -IS_HEADLESS = bpy.app.background - -log = Logger.get_logger(__name__) - - -class BlenderHost(HostBase, IWorkfileHost, IPublishHost, ILoadHost): - name = "blender" - - def install(self): - """Override install method from HostBase. - Install Blender host functionality.""" - install() - - def get_containers(self) -> Iterator: - """List containers from active Blender scene.""" - return ls() - - def get_workfile_extensions(self) -> List[str]: - """Override get_workfile_extensions method from IWorkfileHost. - Get workfile possible extensions. - - Returns: - List[str]: Workfile extensions. - """ - return file_extensions() - - def save_workfile(self, dst_path: str = None): - """Override save_workfile method from IWorkfileHost. - Save currently opened workfile. - - Args: - dst_path (str): Where the current scene should be saved. Or use - current path if `None` is passed. - """ - save_file(dst_path if dst_path else bpy.data.filepath) - - def open_workfile(self, filepath: str): - """Override open_workfile method from IWorkfileHost. - Open workfile at specified filepath in the host. - - Args: - filepath (str): Path to workfile. - """ - open_file(filepath) - - def get_current_workfile(self) -> str: - """Override get_current_workfile method from IWorkfileHost. - Retrieve currently opened workfile path. - - Returns: - str: Path to currently opened workfile. 
- """ - return current_file() - - def workfile_has_unsaved_changes(self) -> bool: - """Override wokfile_has_unsaved_changes method from IWorkfileHost. - Returns True if opened workfile has no unsaved changes. - - Returns: - bool: True if scene is saved and False if it has unsaved - modifications. - """ - return has_unsaved_changes() - - def work_root(self, session) -> str: - """Override work_root method from IWorkfileHost. - Modify workdir per host. - - Args: - session (dict): Session context data. - - Returns: - str: Path to new workdir. - """ - return work_root(session) - - def get_context_data(self) -> dict: - """Override abstract method from IPublishHost. - Get global data related to creation-publishing from workfile. - - Returns: - dict: Context data stored using 'update_context_data'. - """ - property = bpy.context.scene.get(AVALON_PROPERTY) - if property: - return property.to_dict() - return {} - - def update_context_data(self, data: dict, changes: dict): - """Override abstract method from IPublishHost. - Store global context data to workfile. - - Args: - data (dict): New data as are. - changes (dict): Only data that has been changed. Each value has - tuple with '(, )' value. - """ - bpy.context.scene[AVALON_PROPERTY] = data - - -def pype_excepthook_handler(*args): - traceback.print_exception(*args) - - -def install(): - """Install Blender configuration for Avalon.""" - sys.excepthook = pype_excepthook_handler - - pyblish.api.register_host("blender") - pyblish.api.register_plugin_path(str(PUBLISH_PATH)) - - register_loader_plugin_path(str(LOAD_PATH)) - register_creator_plugin_path(str(CREATE_PATH)) - - lib.append_user_scripts() - lib.set_app_templates_path() - - register_event_callback("new", on_new) - register_event_callback("open", on_open) - - _register_callbacks() - _register_events() - - if not IS_HEADLESS: - ops.register() - - -def uninstall(): - """Uninstall Blender configuration for Avalon.""" - sys.excepthook = ORIGINAL_EXCEPTHOOK - - pyblish.api.deregister_host("blender") - pyblish.api.deregister_plugin_path(str(PUBLISH_PATH)) - - deregister_loader_plugin_path(str(LOAD_PATH)) - deregister_creator_plugin_path(str(CREATE_PATH)) - - if not IS_HEADLESS: - ops.unregister() - - -def show_message(title, message): - from ayon_core.tools.utils import show_message_dialog - from .ops import BlenderApplication - - BlenderApplication.get_app() - - show_message_dialog( - title=title, - message=message, - level="warning") - - -def message_window(title, message): - from .ops import ( - MainThreadItem, - execute_in_main_thread, - _process_app_events - ) - - mti = MainThreadItem(show_message, title, message) - execute_in_main_thread(mti) - _process_app_events() - - -def get_folder_attributes(): - project_name = get_current_project_name() - folder_path = get_current_folder_path() - folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) - - return folder_entity["attrib"] - - -def set_frame_range(data): - scene = bpy.context.scene - - # Default scene settings - frameStart = scene.frame_start - frameEnd = scene.frame_end - fps = scene.render.fps / scene.render.fps_base - - if not data: - return - - if data.get("frameStart"): - frameStart = data.get("frameStart") - if data.get("frameEnd"): - frameEnd = data.get("frameEnd") - if data.get("fps"): - fps = data.get("fps") - - scene.frame_start = frameStart - scene.frame_end = frameEnd - scene.render.fps = round(fps) - scene.render.fps_base = round(fps) / fps - - -def set_resolution(data): - scene = bpy.context.scene - - # Default 
scene settings - resolution_x = scene.render.resolution_x - resolution_y = scene.render.resolution_y - - if not data: - return - - if data.get("resolutionWidth"): - resolution_x = data.get("resolutionWidth") - if data.get("resolutionHeight"): - resolution_y = data.get("resolutionHeight") - - scene.render.resolution_x = resolution_x - scene.render.resolution_y = resolution_y - - -def on_new(): - project = os.environ.get("AYON_PROJECT_NAME") - settings = get_project_settings(project).get("blender") - - set_resolution_startup = settings.get("set_resolution_startup") - set_frames_startup = settings.get("set_frames_startup") - - data = get_folder_attributes() - - if set_resolution_startup: - set_resolution(data) - if set_frames_startup: - set_frame_range(data) - - unit_scale_settings = settings.get("unit_scale_settings") - unit_scale_enabled = unit_scale_settings.get("enabled") - if unit_scale_enabled: - unit_scale = unit_scale_settings.get("base_file_unit_scale") - bpy.context.scene.unit_settings.scale_length = unit_scale - - -def on_open(): - project = os.environ.get("AYON_PROJECT_NAME") - settings = get_project_settings(project).get("blender") - - set_resolution_startup = settings.get("set_resolution_startup") - set_frames_startup = settings.get("set_frames_startup") - - data = get_folder_attributes() - - if set_resolution_startup: - set_resolution(data) - if set_frames_startup: - set_frame_range(data) - - unit_scale_settings = settings.get("unit_scale_settings") - unit_scale_enabled = unit_scale_settings.get("enabled") - apply_on_opening = unit_scale_settings.get("apply_on_opening") - if unit_scale_enabled and apply_on_opening: - unit_scale = unit_scale_settings.get("base_file_unit_scale") - prev_unit_scale = bpy.context.scene.unit_settings.scale_length - - if unit_scale != prev_unit_scale: - bpy.context.scene.unit_settings.scale_length = unit_scale - - message_window( - "Base file unit scale changed", - "Base file unit scale changed to match the project settings.") - - -@bpy.app.handlers.persistent -def _on_save_pre(*args): - emit_event("before.save") - - -@bpy.app.handlers.persistent -def _on_save_post(*args): - emit_event("save") - - -@bpy.app.handlers.persistent -def _on_load_post(*args): - # Detect new file or opening an existing file - if bpy.data.filepath: - # Likely this was an open operation since it has a filepath - emit_event("open") - else: - emit_event("new") - - ops.OpenFileCacher.post_load() - - -def _register_callbacks(): - """Register callbacks for certain events.""" - def _remove_handler(handlers: List, callback: Callable): - """Remove the callback from the given handler list.""" - - try: - handlers.remove(callback) - except ValueError: - pass - - # TODO (jasper): implement on_init callback? - - # Be sure to remove existig ones first. - _remove_handler(bpy.app.handlers.save_pre, _on_save_pre) - _remove_handler(bpy.app.handlers.save_post, _on_save_post) - _remove_handler(bpy.app.handlers.load_post, _on_load_post) - - bpy.app.handlers.save_pre.append(_on_save_pre) - bpy.app.handlers.save_post.append(_on_save_post) - bpy.app.handlers.load_post.append(_on_load_post) - - log.info("Installed event handler _on_save_pre...") - log.info("Installed event handler _on_save_post...") - log.info("Installed event handler _on_load_post...") - - -def _on_task_changed(): - """Callback for when the task in the context is changed.""" - - # TODO (jasper): Blender has no concept of projects or workspace. 
- # It would be nice to override 'bpy.ops.wm.open_mainfile' so it takes the - # workdir as starting directory. But I don't know if that is possible. - # Another option would be to create a custom 'File Selector' and add the - # `directory` attribute, so it opens in that directory (does it?). - # https://docs.blender.org/api/blender2.8/bpy.types.Operator.html#calling-a-file-selector - # https://docs.blender.org/api/blender2.8/bpy.types.WindowManager.html#bpy.types.WindowManager.fileselect_add - workdir = os.getenv("AYON_WORKDIR") - log.debug("New working directory: %s", workdir) - - -def _register_events(): - """Install callbacks for specific events.""" - - register_event_callback("taskChanged", _on_task_changed) - log.info("Installed event callback for 'taskChanged'...") - - -def _discover_gui() -> Optional[Callable]: - """Return the most desirable of the currently registered GUIs""" - - # Prefer last registered - guis = reversed(pyblish.api.registered_guis()) - - for gui in guis: - try: - gui = __import__(gui).show - except (ImportError, AttributeError): - continue - else: - return gui - - return None - - -def add_to_avalon_container(container: bpy.types.Collection): - """Add the container to the Avalon container.""" - - avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) - if not avalon_container: - avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) - - # Link the container to the scene so it's easily visible to the artist - # and can be managed easily. Otherwise it's only found in "Blender - # File" view and it will be removed by Blenders garbage collection, - # unless you set a 'fake user'. - bpy.context.scene.collection.children.link(avalon_container) - - avalon_container.children.link(container) - - # Disable Avalon containers for the view layers. - for view_layer in bpy.context.scene.view_layers: - for child in view_layer.layer_collection.children: - if child.collection == avalon_container: - child.exclude = True - - -def metadata_update(node: bpy.types.bpy_struct_meta_idprop, data: Dict): - """Imprint the node with metadata. - - Existing metadata will be updated. - """ - - if not node.get(AVALON_PROPERTY): - node[AVALON_PROPERTY] = dict() - for key, value in data.items(): - if value is None: - continue - node[AVALON_PROPERTY][key] = value - - -def containerise(name: str, - namespace: str, - nodes: List, - context: Dict, - loader: Optional[str] = None, - suffix: Optional[str] = "CON") -> bpy.types.Collection: - """Bundle `nodes` into an assembly and imprint it with metadata - - Containerisation enables a tracking of version, author and origin - for loaded assets. - - Arguments: - name: Name of resulting assembly - namespace: Namespace under which to host container - nodes: Long names of nodes to containerise - context: Asset information - loader: Name of loader used to produce this container. - suffix: Suffix of container, defaults to `_CON`. 
- - Returns: - The container assembly - - """ - - node_name = f"{context['folder']['name']}_{name}" - if namespace: - node_name = f"{namespace}:{node_name}" - if suffix: - node_name = f"{node_name}_{suffix}" - container = bpy.data.collections.new(name=node_name) - # Link the children nodes - for obj in nodes: - container.objects.link(obj) - - data = { - "schema": "openpype:container-2.0", - "id": AVALON_CONTAINER_ID, - "name": name, - "namespace": namespace or '', - "loader": str(loader), - "representation": context["representation"]["id"], - } - - metadata_update(container, data) - add_to_avalon_container(container) - - return container - - -def containerise_existing( - container: bpy.types.Collection, - name: str, - namespace: str, - context: Dict, - loader: Optional[str] = None, - suffix: Optional[str] = "CON") -> bpy.types.Collection: - """Imprint or update container with metadata. - - Arguments: - name: Name of resulting assembly - namespace: Namespace under which to host container - context: Asset information - loader: Name of loader used to produce this container. - suffix: Suffix of container, defaults to `_CON`. - - Returns: - The container assembly - """ - - node_name = container.name - if suffix: - node_name = f"{node_name}_{suffix}" - container.name = node_name - data = { - "schema": "openpype:container-2.0", - "id": AVALON_CONTAINER_ID, - "name": name, - "namespace": namespace or '', - "loader": str(loader), - "representation": context["representation"]["id"], - } - - metadata_update(container, data) - add_to_avalon_container(container) - - return container - - -def parse_container(container: bpy.types.Collection, - validate: bool = True) -> Dict: - """Return the container node's full container data. - - Args: - container: A container node name. - validate: turn the validation for the container on or off - - Returns: - The container schema data for this container node. - - """ - - data = lib.read(container) - - # Append transient data - data["objectName"] = container.name - - if validate: - schema.validate(data) - - return data - - -def ls() -> Iterator: - """List containers from active Blender scene. - - This is the host-equivalent of api.ls(), but instead of listing assets on - disk, it lists assets already loaded in Blender; once loaded they are - called containers. 
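The metadata dictionary imprinted by `containerise()` above is what `parse_container()` and `ls()` later hand back, plus a transient `objectName` key. A small usage sketch inside a running Blender session with this API loaded:

# Sketch: list every loaded container and the key fields imprinted above.
for container in ls():
    print(
        container["objectName"],     # Blender collection name (transient)
        container["name"],
        container["loader"],
        container["representation"],
    )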
- """ - - for id_type in {AYON_CONTAINER_ID, AVALON_CONTAINER_ID}: - for container in lib.lsattr("id", id_type): - yield parse_container(container) - - -def publish(): - """Shorthand to publish from within host.""" - - return pyblish.util.publish() diff --git a/server_addon/blender/client/ayon_blender/api/plugin.py b/server_addon/blender/client/ayon_blender/api/plugin.py deleted file mode 100644 index e72bf20287..0000000000 --- a/server_addon/blender/client/ayon_blender/api/plugin.py +++ /dev/null @@ -1,542 +0,0 @@ -"""Shared functionality for pipeline plugins for Blender.""" - -import itertools -from pathlib import Path -from typing import Dict, List, Optional - -import pyblish.api -import bpy - -from ayon_core.pipeline import ( - Creator, - CreatedInstance, - LoaderPlugin, - AVALON_INSTANCE_ID, - AYON_INSTANCE_ID, -) -from ayon_core.pipeline.publish import Extractor -from ayon_core.lib import BoolDef - -from .pipeline import ( - AVALON_CONTAINERS, - AVALON_INSTANCES, - AVALON_PROPERTY, -) -from .ops import ( - MainThreadItem, - execute_in_main_thread -) -from .lib import imprint - -VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx", - ".usd", ".usdc", ".usda"] - - -def prepare_scene_name( - folder_name: str, product_name: str, namespace: Optional[str] = None -) -> str: - """Return a consistent name for an asset.""" - name = f"{folder_name}" - if namespace: - name = f"{name}_{namespace}" - name = f"{name}_{product_name}" - - # Blender name for a collection or object cannot be longer than 63 - # characters. If the name is longer, it will raise an error. - if len(name) > 63: - raise ValueError(f"Scene name '{name}' would be too long.") - - return name - - -def get_unique_number( - folder_name: str, product_name: str -) -> str: - """Return a unique number based on the folder name.""" - avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) - if not avalon_container: - return "01" - # Check the names of both object and collection containers - obj_asset_groups = avalon_container.objects - obj_group_names = { - c.name for c in obj_asset_groups - if c.type == 'EMPTY' and c.get(AVALON_PROPERTY)} - coll_asset_groups = avalon_container.children - coll_group_names = { - c.name for c in coll_asset_groups - if c.get(AVALON_PROPERTY)} - container_names = obj_group_names.union(coll_group_names) - count = 1 - name = f"{folder_name}_{count:0>2}_{product_name}" - while name in container_names: - count += 1 - name = f"{folder_name}_{count:0>2}_{product_name}" - return f"{count:0>2}" - - -def prepare_data(data, container_name=None): - name = data.name - local_data = data.make_local() - if container_name: - local_data.name = f"{container_name}:{name}" - else: - local_data.name = f"{name}" - return local_data - - -def create_blender_context(active: Optional[bpy.types.Object] = None, - selected: Optional[bpy.types.Object] = None, - window: Optional[bpy.types.Window] = None): - """Create a new Blender context. If an object is passed as - parameter, it is set as selected and active. 
- """ - - if not isinstance(selected, list): - selected = [selected] - - override_context = bpy.context.copy() - - windows = [window] if window else bpy.context.window_manager.windows - - for win in windows: - for area in win.screen.areas: - if area.type == 'VIEW_3D': - for region in area.regions: - if region.type == 'WINDOW': - override_context['window'] = win - override_context['screen'] = win.screen - override_context['area'] = area - override_context['region'] = region - override_context['scene'] = bpy.context.scene - override_context['active_object'] = active - override_context['selected_objects'] = selected - return override_context - raise Exception("Could not create a custom Blender context.") - - -def get_parent_collection(collection): - """Get the parent of the input collection""" - check_list = [bpy.context.scene.collection] - - for c in check_list: - if collection.name in c.children.keys(): - return c - check_list.extend(c.children) - - return None - - -def get_local_collection_with_name(name): - for collection in bpy.data.collections: - if collection.name == name and collection.library is None: - return collection - return None - - -def deselect_all(): - """Deselect all objects in the scene. - - Blender gives context error if trying to deselect object that it isn't - in object mode. - """ - modes = [] - active = bpy.context.view_layer.objects.active - - for obj in bpy.data.objects: - if obj.mode != 'OBJECT': - modes.append((obj, obj.mode)) - bpy.context.view_layer.objects.active = obj - context_override = create_blender_context(active=obj) - with bpy.context.temp_override(**context_override): - bpy.ops.object.mode_set(mode='OBJECT') - - context_override = create_blender_context() - with bpy.context.temp_override(**context_override): - bpy.ops.object.select_all(action='DESELECT') - - for p in modes: - bpy.context.view_layer.objects.active = p[0] - context_override = create_blender_context(active=p[0]) - with bpy.context.temp_override(**context_override): - bpy.ops.object.mode_set(mode=p[1]) - - bpy.context.view_layer.objects.active = active - - -class BlenderInstancePlugin(pyblish.api.InstancePlugin): - settings_category = "blender" - - -class BlenderContextPlugin(pyblish.api.ContextPlugin): - settings_category = "blender" - - -class BlenderExtractor(Extractor): - settings_category = "blender" - - -class BlenderCreator(Creator): - """Base class for Blender Creator plug-ins.""" - defaults = ['Main'] - - settings_category = "blender" - create_as_asset_group = False - - @staticmethod - def cache_instance_data(shared_data): - """Cache instances for Creators shared data. - - Create `blender_cached_instances` key when needed in shared data and - fill it with all collected instances from the scene under its - respective creator identifiers. - - If legacy instances are detected in the scene, create - `blender_cached_legacy_instances` key and fill it with - all legacy products from this family as a value. # key or value? - - Args: - shared_data(Dict[str, Any]): Shared data. 
- - """ - if not shared_data.get('blender_cached_instances'): - cache = {} - cache_legacy = {} - - avalon_instances = bpy.data.collections.get(AVALON_INSTANCES) - avalon_instance_objs = ( - avalon_instances.objects if avalon_instances else [] - ) - - for obj_or_col in itertools.chain( - avalon_instance_objs, - bpy.data.collections - ): - avalon_prop = obj_or_col.get(AVALON_PROPERTY, {}) - if not avalon_prop: - continue - - if avalon_prop.get('id') not in { - AYON_INSTANCE_ID, AVALON_INSTANCE_ID - }: - continue - - creator_id = avalon_prop.get('creator_identifier') - if creator_id: - # Creator instance - cache.setdefault(creator_id, []).append(obj_or_col) - else: - family = avalon_prop.get('family') - if family: - # Legacy creator instance - cache_legacy.setdefault(family, []).append(obj_or_col) - - shared_data["blender_cached_instances"] = cache - shared_data["blender_cached_legacy_instances"] = cache_legacy - - return shared_data - - def create( - self, product_name: str, instance_data: dict, pre_create_data: dict - ): - """Override abstract method from Creator. - Create new instance and store it. - - Args: - product_name (str): Product name of created instance. - instance_data (dict): Instance base data. - pre_create_data (dict): Data based on pre creation attributes. - Those may affect how creator works. - """ - # Get Instance Container or create it if it does not exist - instances = bpy.data.collections.get(AVALON_INSTANCES) - if not instances: - instances = bpy.data.collections.new(name=AVALON_INSTANCES) - bpy.context.scene.collection.children.link(instances) - - # Create asset group - folder_name = instance_data["folderPath"].split("/")[-1] - - name = prepare_scene_name(folder_name, product_name) - if self.create_as_asset_group: - # Create instance as empty - instance_node = bpy.data.objects.new(name=name, object_data=None) - instance_node.empty_display_type = 'SINGLE_ARROW' - instances.objects.link(instance_node) - else: - # Create instance collection - instance_node = bpy.data.collections.new(name=name) - instances.children.link(instance_node) - - self.set_instance_data(product_name, instance_data) - - instance = CreatedInstance( - self.product_type, product_name, instance_data, self - ) - instance.transient_data["instance_node"] = instance_node - self._add_instance_to_context(instance) - - imprint(instance_node, instance_data) - - return instance_node - - def collect_instances(self): - """Override abstract method from BlenderCreator. - Collect existing instances related to this creator plugin.""" - - # Cache instances in shared data - self.cache_instance_data(self.collection_shared_data) - - # Get cached instances - cached_instances = self.collection_shared_data.get( - "blender_cached_instances" - ) - if not cached_instances: - return - - # Process only instances that were created by this creator - for instance_node in cached_instances.get(self.identifier, []): - property = instance_node.get(AVALON_PROPERTY) - # Create instance object from existing data - instance = CreatedInstance.from_existing( - instance_data=property.to_dict(), - creator=self - ) - instance.transient_data["instance_node"] = instance_node - - # Add instance to create context - self._add_instance_to_context(instance) - - def update_instances(self, update_list): - """Override abstract method from BlenderCreator. - Store changes of existing instances so they can be recollected. - - Args: - update_list(List[UpdateData]): Changed instances - and their changes, as a list of tuples. 
- """ - - for created_instance, changes in update_list: - data = created_instance.data_to_store() - node = created_instance.transient_data["instance_node"] - if not node: - # We can't update if we don't know the node - self.log.error( - f"Unable to update instance {created_instance} " - f"without instance node." - ) - return - - # Rename the instance node in the scene if product - # or folder changed. - # Do not rename the instance if the family is workfile, as the - # workfile instance is included in the AVALON_CONTAINER collection. - if ( - "productName" in changes.changed_keys - or "folderPath" in changes.changed_keys - ) and created_instance.product_type != "workfile": - folder_name = data["folderPath"].split("/")[-1] - name = prepare_scene_name( - folder_name, data["productName"] - ) - node.name = name - - imprint(node, data) - - def remove_instances(self, instances: List[CreatedInstance]): - - for instance in instances: - node = instance.transient_data["instance_node"] - - if isinstance(node, bpy.types.Collection): - for children in node.children_recursive: - if isinstance(children, bpy.types.Collection): - bpy.data.collections.remove(children) - else: - bpy.data.objects.remove(children) - - bpy.data.collections.remove(node) - elif isinstance(node, bpy.types.Object): - bpy.data.objects.remove(node) - - self._remove_instance_from_context(instance) - - def set_instance_data( - self, - product_name: str, - instance_data: dict - ): - """Fill instance data with required items. - - Args: - product_name(str): Product name of created instance. - instance_data(dict): Instance base data. - instance_node(bpy.types.ID): Instance node in blender scene. - """ - if not instance_data: - instance_data = {} - - instance_data.update( - { - "id": AVALON_INSTANCE_ID, - "creator_identifier": self.identifier, - "productName": product_name, - } - ) - - def get_pre_create_attr_defs(self): - return [ - BoolDef("use_selection", - label="Use selection", - default=True) - ] - - -class BlenderLoader(LoaderPlugin): - """A basic AssetLoader for Blender - - This will implement the basic logic for linking/appending assets - into another Blender scene. - - The `update` method should be implemented by a sub-class, because - it's different for different types (e.g. model, rig, animation, - etc.). - """ - settings_category = "blender" - - @staticmethod - def _get_instance_empty(instance_name: str, nodes: List) -> Optional[bpy.types.Object]: - """Get the 'instance empty' that holds the collection instance.""" - for node in nodes: - if not isinstance(node, bpy.types.Object): - continue - if (node.type == 'EMPTY' and node.instance_type == 'COLLECTION' - and node.instance_collection and node.name == instance_name): - return node - return None - - @staticmethod - def _get_instance_collection(instance_name: str, nodes: List) -> Optional[bpy.types.Collection]: - """Get the 'instance collection' (container) for this asset.""" - for node in nodes: - if not isinstance(node, bpy.types.Collection): - continue - if node.name == instance_name: - return node - return None - - @staticmethod - def _get_library_from_container(container: bpy.types.Collection) -> bpy.types.Library: - """Find the library file from the container. - - It traverses the objects from this collection, checks if there is only - 1 library from which the objects come from and returns the library. - - Warning: - No nested collections are supported at the moment! - """ - assert not container.children, "Nested collections are not supported." 
- assert container.objects, "The collection doesn't contain any objects." - libraries = set() - for obj in container.objects: - assert obj.library, f"'{obj.name}' is not linked." - libraries.add(obj.library) - - assert len( - libraries) == 1, "'{container.name}' contains objects from more then 1 library." - - return list(libraries)[0] - - def process_asset(self, - context: dict, - name: str, - namespace: Optional[str] = None, - options: Optional[Dict] = None): - """Must be implemented by a sub-class""" - raise NotImplementedError("Must be implemented by a sub-class") - - def load(self, - context: dict, - name: Optional[str] = None, - namespace: Optional[str] = None, - options: Optional[Dict] = None) -> Optional[bpy.types.Collection]: - """ Run the loader on Blender main thread""" - mti = MainThreadItem(self._load, context, name, namespace, options) - execute_in_main_thread(mti) - - def _load(self, - context: dict, - name: Optional[str] = None, - namespace: Optional[str] = None, - options: Optional[Dict] = None - ) -> Optional[bpy.types.Collection]: - """Load asset via database - - Arguments: - context: Full parenthood of representation to load - name: Use pre-defined name - namespace: Use pre-defined namespace - options: Additional settings dictionary - """ - # TODO: make it possible to add the asset several times by - # just re-using the collection - filepath = self.filepath_from_context(context) - assert Path(filepath).exists(), f"{filepath} doesn't exist." - - folder_name = context["folder"]["name"] - product_name = context["product"]["name"] - unique_number = get_unique_number( - folder_name, product_name - ) - namespace = namespace or f"{folder_name}_{unique_number}" - name = name or prepare_scene_name( - folder_name, product_name, unique_number - ) - - nodes = self.process_asset( - context=context, - name=name, - namespace=namespace, - options=options, - ) - - # Only containerise if anything was loaded by the Loader. - if not nodes: - return None - - # Only containerise if it's not already a collection from a .blend file. 
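As a hedged illustration of how a concrete loader might implement `process_asset()` for a published `.blend` file, using only the standard `bpy.data.libraries.load()` link API; the subclass name is made up and plugin filtering attributes are omitted:

import bpy

class ExampleBlendLoader(BlenderLoader):
    # Hypothetical subclass for illustration only.

    def process_asset(self, context, name, namespace=None, options=None):
        filepath = self.filepath_from_context(context)
        # Link every collection from the published .blend into this file.
        with bpy.data.libraries.load(filepath, link=True) as (data_from, data_to):
            data_to.collections = data_from.collections
        # Return the linked datablocks so _load() can decide whether to containerise.
        return list(data_to.collections)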
- # representation = context["representation"]["name"] - # if representation != "blend": - # from ayon_blender.api.pipeline import containerise - # return containerise( - # name=name, - # namespace=namespace, - # nodes=nodes, - # context=context, - # loader=self.__class__.__name__, - # ) - - # folder_name = context["folder"]["name"] - # product_name = context["product"]["name"] - # instance_name = prepare_scene_name( - # folder_name, product_name, unique_number - # ) + '_CON' - - # return self._get_instance_collection(instance_name, nodes) - - def exec_update(self, container: Dict, context: Dict): - """Must be implemented by a sub-class""" - raise NotImplementedError("Must be implemented by a sub-class") - - def update(self, container: Dict, context: Dict): - """ Run the update on Blender main thread""" - mti = MainThreadItem(self.exec_update, container, context) - execute_in_main_thread(mti) - - def exec_remove(self, container: Dict) -> bool: - """Must be implemented by a sub-class""" - raise NotImplementedError("Must be implemented by a sub-class") - - def remove(self, container: Dict) -> bool: - """ Run the remove on Blender main thread""" - mti = MainThreadItem(self.exec_remove, container) - execute_in_main_thread(mti) diff --git a/server_addon/blender/client/ayon_blender/api/render_lib.py b/server_addon/blender/client/ayon_blender/api/render_lib.py deleted file mode 100644 index 91913f7913..0000000000 --- a/server_addon/blender/client/ayon_blender/api/render_lib.py +++ /dev/null @@ -1,364 +0,0 @@ -from pathlib import Path - -import bpy - -from ayon_core.settings import get_project_settings -from ayon_core.pipeline import get_current_project_name - - -def get_default_render_folder(settings): - """Get default render folder from blender settings.""" - - return (settings["blender"] - ["RenderSettings"] - ["default_render_image_folder"]) - - -def get_aov_separator(settings): - """Get aov separator from blender settings.""" - - aov_sep = (settings["blender"] - ["RenderSettings"] - ["aov_separator"]) - - if aov_sep == "dash": - return "-" - elif aov_sep == "underscore": - return "_" - elif aov_sep == "dot": - return "." - else: - raise ValueError(f"Invalid aov separator: {aov_sep}") - - -def get_image_format(settings): - """Get image format from blender settings.""" - - return (settings["blender"] - ["RenderSettings"] - ["image_format"]) - - -def get_multilayer(settings): - """Get multilayer from blender settings.""" - - return (settings["blender"] - ["RenderSettings"] - ["multilayer_exr"]) - - -def get_renderer(settings): - """Get renderer from blender settings.""" - - return (settings["blender"] - ["RenderSettings"] - ["renderer"]) - - -def get_compositing(settings): - """Get compositing from blender settings.""" - - return (settings["blender"] - ["RenderSettings"] - ["compositing"]) - - -def get_render_product(output_path, name, aov_sep): - """ - Generate the path to the render product. Blender interprets the `#` - as the frame number, when it renders. - - Args: - file_path (str): The path to the blender scene. - render_folder (str): The render folder set in settings. - file_name (str): The name of the blender scene. - instance (pyblish.api.Instance): The instance to publish. - ext (str): The image format to render. 
- """ - filepath = output_path / name.lstrip("/") - render_product = f"{filepath}{aov_sep}beauty.####" - render_product = render_product.replace("\\", "/") - - return render_product - - -def set_render_format(ext, multilayer): - # Set Blender to save the file with the right extension - bpy.context.scene.render.use_file_extension = True - - image_settings = bpy.context.scene.render.image_settings - - if ext == "exr": - image_settings.file_format = ( - "OPEN_EXR_MULTILAYER" if multilayer else "OPEN_EXR") - elif ext == "bmp": - image_settings.file_format = "BMP" - elif ext == "rgb": - image_settings.file_format = "IRIS" - elif ext == "png": - image_settings.file_format = "PNG" - elif ext == "jpeg": - image_settings.file_format = "JPEG" - elif ext == "jp2": - image_settings.file_format = "JPEG2000" - elif ext == "tga": - image_settings.file_format = "TARGA" - elif ext == "tif": - image_settings.file_format = "TIFF" - - -def set_render_passes(settings, renderer): - aov_list = set(settings["blender"]["RenderSettings"]["aov_list"]) - custom_passes = settings["blender"]["RenderSettings"]["custom_passes"] - - # Common passes for both renderers - vl = bpy.context.view_layer - - # Data Passes - vl.use_pass_combined = "combined" in aov_list - vl.use_pass_z = "z" in aov_list - vl.use_pass_mist = "mist" in aov_list - vl.use_pass_normal = "normal" in aov_list - - # Light Passes - vl.use_pass_diffuse_direct = "diffuse_light" in aov_list - vl.use_pass_diffuse_color = "diffuse_color" in aov_list - vl.use_pass_glossy_direct = "specular_light" in aov_list - vl.use_pass_glossy_color = "specular_color" in aov_list - vl.use_pass_emit = "emission" in aov_list - vl.use_pass_environment = "environment" in aov_list - vl.use_pass_ambient_occlusion = "ao" in aov_list - - # Cryptomatte Passes - vl.use_pass_cryptomatte_object = "cryptomatte_object" in aov_list - vl.use_pass_cryptomatte_material = "cryptomatte_material" in aov_list - vl.use_pass_cryptomatte_asset = "cryptomatte_asset" in aov_list - - if renderer == "BLENDER_EEVEE": - # Eevee exclusive passes - eevee = vl.eevee - - # Light Passes - vl.use_pass_shadow = "shadow" in aov_list - eevee.use_pass_volume_direct = "volume_light" in aov_list - - # Effects Passes - eevee.use_pass_bloom = "bloom" in aov_list - eevee.use_pass_transparent = "transparent" in aov_list - - # Cryptomatte Passes - vl.use_pass_cryptomatte_accurate = "cryptomatte_accurate" in aov_list - elif renderer == "CYCLES": - # Cycles exclusive passes - cycles = vl.cycles - - # Data Passes - vl.use_pass_position = "position" in aov_list - vl.use_pass_vector = "vector" in aov_list - vl.use_pass_uv = "uv" in aov_list - cycles.denoising_store_passes = "denoising" in aov_list - vl.use_pass_object_index = "object_index" in aov_list - vl.use_pass_material_index = "material_index" in aov_list - cycles.pass_debug_sample_count = "sample_count" in aov_list - - # Light Passes - vl.use_pass_diffuse_indirect = "diffuse_indirect" in aov_list - vl.use_pass_glossy_indirect = "specular_indirect" in aov_list - vl.use_pass_transmission_direct = "transmission_direct" in aov_list - vl.use_pass_transmission_indirect = "transmission_indirect" in aov_list - vl.use_pass_transmission_color = "transmission_color" in aov_list - cycles.use_pass_volume_direct = "volume_light" in aov_list - cycles.use_pass_volume_indirect = "volume_indirect" in aov_list - cycles.use_pass_shadow_catcher = "shadow" in aov_list - - aovs_names = [aov.name for aov in vl.aovs] - for cp in custom_passes: - cp_name = cp["attribute"] - if cp_name not in 
aovs_names: - aov = vl.aovs.add() - aov.name = cp_name - else: - aov = vl.aovs[cp_name] - aov.type = cp["value"] - - return list(aov_list), custom_passes - - -def _create_aov_slot(name, aov_sep, slots, rpass_name, multi_exr, output_path): - filename = f"{name}{aov_sep}{rpass_name}.####" - slot = slots.new(rpass_name if multi_exr else filename) - filepath = str(output_path / filename.lstrip("/")) - - return slot, filepath - - -def set_node_tree( - output_path, render_product, name, aov_sep, ext, multilayer, compositing -): - # Set the scene to use the compositor node tree to render - bpy.context.scene.use_nodes = True - - tree = bpy.context.scene.node_tree - - comp_layer_type = "CompositorNodeRLayers" - output_type = "CompositorNodeOutputFile" - compositor_type = "CompositorNodeComposite" - - # Get the Render Layer, Composite and the previous output nodes - render_layer_node = None - composite_node = None - old_output_node = None - for node in tree.nodes: - if node.bl_idname == comp_layer_type: - render_layer_node = node - elif node.bl_idname == compositor_type: - composite_node = node - elif node.bl_idname == output_type and "AYON" in node.name: - old_output_node = node - if render_layer_node and composite_node and old_output_node: - break - - # If there's not a Render Layers node, we create it - if not render_layer_node: - render_layer_node = tree.nodes.new(comp_layer_type) - - # Get the enabled output sockets, that are the active passes for the - # render. - # We also exclude some layers. - exclude_sockets = ["Image", "Alpha", "Noisy Image"] - passes = [ - socket - for socket in render_layer_node.outputs - if socket.enabled and socket.name not in exclude_sockets - ] - - # Create a new output node - output = tree.nodes.new(output_type) - - image_settings = bpy.context.scene.render.image_settings - output.format.file_format = image_settings.file_format - - slots = None - - # In case of a multilayer exr, we don't need to use the output node, - # because the blender render already outputs a multilayer exr. - multi_exr = ext == "exr" and multilayer - slots = output.layer_slots if multi_exr else output.file_slots - output.base_path = render_product if multi_exr else str(output_path) - - slots.clear() - - aov_file_products = [] - - old_links = { - link.from_socket.name: link for link in tree.links - if link.to_node == old_output_node} - - # Create a new socket for the beauty output - pass_name = "rgba" if multi_exr else "beauty" - slot, _ = _create_aov_slot( - name, aov_sep, slots, pass_name, multi_exr, output_path) - tree.links.new(render_layer_node.outputs["Image"], slot) - - if compositing: - # Create a new socket for the composite output - pass_name = "composite" - comp_socket, filepath = _create_aov_slot( - name, aov_sep, slots, pass_name, multi_exr, output_path) - aov_file_products.append(("Composite", filepath)) - - # For each active render pass, we add a new socket to the output node - # and link it - for rpass in passes: - slot, filepath = _create_aov_slot( - name, aov_sep, slots, rpass.name, multi_exr, output_path) - aov_file_products.append((rpass.name, filepath)) - - # If the rpass was not connected with the old output node, we connect - # it with the new one. - if not old_links.get(rpass.name): - tree.links.new(rpass, slot) - - for link in list(old_links.values()): - # Check if the socket is still available in the new output node. - socket = output.inputs.get(link.to_socket.name) - # If it is, we connect it with the new output node. 
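To make the output naming concrete, a small sketch of what `get_render_product()` and `_create_aov_slot()` produce (all paths and names below are hypothetical):

from pathlib import Path

output_path = Path("/work/shot010/renders/blender/shot010_lighting_v001")
name = "renderMain"
aov_sep = "_"

get_render_product(output_path, name, aov_sep)
# -> "/work/shot010/renders/blender/shot010_lighting_v001/renderMain_beauty.####"

# _create_aov_slot(name, aov_sep, slots, "diffuse_color", multi_exr=False, output_path)
# names the new file slot "renderMain_diffuse_color.####" and returns it together with
# "/work/shot010/renders/blender/shot010_lighting_v001/renderMain_diffuse_color.####"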
- if socket: - tree.links.new(link.from_socket, socket) - # Then, we remove the old link. - tree.links.remove(link) - - # If there's a composite node, we connect its input with the new output - if compositing and composite_node: - for link in tree.links: - if link.to_node == composite_node: - tree.links.new(link.from_socket, comp_socket) - break - - if old_output_node: - output.location = old_output_node.location - tree.nodes.remove(old_output_node) - - output.name = "AYON File Output" - output.label = "AYON File Output" - - return [] if multi_exr else aov_file_products - - -def imprint_render_settings(node, data): - RENDER_DATA = "render_data" - if not node.get(RENDER_DATA): - node[RENDER_DATA] = {} - for key, value in data.items(): - if value is None: - continue - node[RENDER_DATA][key] = value - - -def prepare_rendering(asset_group): - name = asset_group.name - - filepath = Path(bpy.data.filepath) - assert filepath, "Workfile not saved. Please save the file first." - - dirpath = filepath.parent - file_name = Path(filepath.name).stem - - project = get_current_project_name() - settings = get_project_settings(project) - - render_folder = get_default_render_folder(settings) - aov_sep = get_aov_separator(settings) - ext = get_image_format(settings) - multilayer = get_multilayer(settings) - renderer = get_renderer(settings) - compositing = get_compositing(settings) - - set_render_format(ext, multilayer) - bpy.context.scene.render.engine = renderer - aov_list, custom_passes = set_render_passes(settings, renderer) - - output_path = Path.joinpath(dirpath, render_folder, file_name) - - render_product = get_render_product(output_path, name, aov_sep) - aov_file_product = set_node_tree( - output_path, render_product, name, aov_sep, - ext, multilayer, compositing) - - # Clear the render filepath, so that the output is handled only by the - # output node in the compositor. - bpy.context.scene.render.filepath = "" - - render_settings = { - "render_folder": render_folder, - "aov_separator": aov_sep, - "image_format": ext, - "multilayer_exr": multilayer, - "aov_list": aov_list, - "custom_passes": custom_passes, - "render_product": render_product, - "aov_file_product": aov_file_product, - "review": True, - } - - imprint_render_settings(asset_group, render_settings) diff --git a/server_addon/blender/client/ayon_blender/api/workio.py b/server_addon/blender/client/ayon_blender/api/workio.py deleted file mode 100644 index e0f333843a..0000000000 --- a/server_addon/blender/client/ayon_blender/api/workio.py +++ /dev/null @@ -1,89 +0,0 @@ -"""Host API required for Work Files.""" - -from pathlib import Path -from typing import List, Optional - -import bpy - - -class OpenFileCacher: - """Store information about opening file. - - When file is opening QApplcation events should not be processed. 
- """ - opening_file = False - - @classmethod - def post_load(cls): - cls.opening_file = False - - @classmethod - def set_opening(cls): - cls.opening_file = True - - -def open_file(filepath: str) -> Optional[str]: - """Open the scene file in Blender.""" - OpenFileCacher.set_opening() - - preferences = bpy.context.preferences - load_ui = preferences.filepaths.use_load_ui - use_scripts = preferences.filepaths.use_scripts_auto_execute - result = bpy.ops.wm.open_mainfile( - filepath=filepath, - load_ui=load_ui, - use_scripts=use_scripts, - ) - - if result == {'FINISHED'}: - return filepath - return None - - -def save_file(filepath: str, copy: bool = False) -> Optional[str]: - """Save the open scene file.""" - - preferences = bpy.context.preferences - compress = preferences.filepaths.use_file_compression - relative_remap = preferences.filepaths.use_relative_paths - result = bpy.ops.wm.save_as_mainfile( - filepath=filepath, - compress=compress, - relative_remap=relative_remap, - copy=copy, - ) - - if result == {'FINISHED'}: - return filepath - return None - - -def current_file() -> Optional[str]: - """Return the path of the open scene file.""" - - current_filepath = bpy.data.filepath - if Path(current_filepath).is_file(): - return current_filepath - return None - - -def has_unsaved_changes() -> bool: - """Does the open scene file have unsaved changes?""" - - return bpy.data.is_dirty - - -def file_extensions() -> List[str]: - """Return the supported file extensions for Blender scene files.""" - - return [".blend"] - - -def work_root(session: dict) -> str: - """Return the default root to browse for work files.""" - - work_dir = session["AYON_WORKDIR"] - scene_dir = session.get("AVALON_SCENEDIR") - if scene_dir: - return str(Path(work_dir, scene_dir)) - return work_dir diff --git a/server_addon/blender/client/ayon_blender/blender_addon/startup/init.py b/server_addon/blender/client/ayon_blender/blender_addon/startup/init.py deleted file mode 100644 index bd0d52627c..0000000000 --- a/server_addon/blender/client/ayon_blender/blender_addon/startup/init.py +++ /dev/null @@ -1,10 +0,0 @@ -from ayon_core.pipeline import install_host -from ayon_blender.api import BlenderHost - - -def register(): - install_host(BlenderHost()) - - -def unregister(): - pass diff --git a/server_addon/blender/client/ayon_blender/hooks/pre_add_run_python_script_arg.py b/server_addon/blender/client/ayon_blender/hooks/pre_add_run_python_script_arg.py deleted file mode 100644 index 9041ef7309..0000000000 --- a/server_addon/blender/client/ayon_blender/hooks/pre_add_run_python_script_arg.py +++ /dev/null @@ -1,54 +0,0 @@ -from pathlib import Path - -from ayon_applications import PreLaunchHook, LaunchTypes - - -class AddPythonScriptToLaunchArgs(PreLaunchHook): - """Add python script to be executed before Blender launch.""" - - # Append after file argument - order = 15 - app_groups = {"blender"} - launch_types = {LaunchTypes.local} - - def execute(self): - if not self.launch_context.data.get("python_scripts"): - return - - # Add path to workfile to arguments - for python_script_path in self.launch_context.data["python_scripts"]: - self.log.info( - f"Adding python script {python_script_path} to launch" - ) - # Test script path exists - python_script_path = Path(python_script_path) - if not python_script_path.exists(): - self.log.warning( - f"Python script {python_script_path} doesn't exist. " - "Skipped..." 
- ) - continue - - if "--" in self.launch_context.launch_args: - # Insert before separator - separator_index = self.launch_context.launch_args.index("--") - self.launch_context.launch_args.insert( - separator_index, - "-P", - ) - self.launch_context.launch_args.insert( - separator_index + 1, - python_script_path.as_posix(), - ) - else: - self.launch_context.launch_args.extend( - ["-P", python_script_path.as_posix()] - ) - - # Ensure separator - if "--" not in self.launch_context.launch_args: - self.launch_context.launch_args.append("--") - - self.launch_context.launch_args.extend( - [*self.launch_context.data.get("script_args", [])] - ) diff --git a/server_addon/blender/client/ayon_blender/hooks/pre_pyside_install.py b/server_addon/blender/client/ayon_blender/hooks/pre_pyside_install.py deleted file mode 100644 index 87a4f5cfad..0000000000 --- a/server_addon/blender/client/ayon_blender/hooks/pre_pyside_install.py +++ /dev/null @@ -1,295 +0,0 @@ -import os -import re -import subprocess -from platform import system -from ayon_applications import PreLaunchHook, LaunchTypes - - -class InstallPySideToBlender(PreLaunchHook): - """Install Qt binding to blender's python packages. - - Prelaunch hook does 2 things: - 1.) Blender's python packages are pushed to the beginning of PYTHONPATH. - 2.) Check if blender has installed PySide2 and will try to install if not. - - For pipeline implementation is required to have Qt binding installed in - blender's python packages. - """ - - app_groups = {"blender"} - launch_types = {LaunchTypes.local} - - def execute(self): - # Prelaunch hook is not crucial - try: - self.inner_execute() - except Exception: - self.log.warning( - "Processing of {} crashed.".format(self.__class__.__name__), - exc_info=True - ) - - def inner_execute(self): - # Get blender's python directory - version_regex = re.compile(r"^([2-4])\.[0-9]+$") - - platform = system().lower() - executable = self.launch_context.executable.executable_path - expected_executable = "blender" - if platform == "windows": - expected_executable += ".exe" - - if os.path.basename(executable).lower() != expected_executable: - self.log.info(( - f"Executable does not lead to {expected_executable} file." - "Can't determine blender's python to check/install" - " Qt binding." - )) - return - - versions_dir = os.path.dirname(executable) - if platform == "darwin": - versions_dir = os.path.join( - os.path.dirname(versions_dir), "Resources" - ) - version_subfolders = [] - for dir_entry in os.scandir(versions_dir): - if dir_entry.is_dir() and version_regex.match(dir_entry.name): - version_subfolders.append(dir_entry.name) - - if not version_subfolders: - self.log.info( - "Didn't find version subfolder next to Blender executable" - ) - return - - if len(version_subfolders) > 1: - self.log.info(( - "Found more than one version subfolder next" - " to blender executable. {}" - ).format(", ".join([ - '"./{}"'.format(name) - for name in version_subfolders - ]))) - return - - version_subfolder = version_subfolders[0] - before_blender_4 = False - if int(version_regex.match(version_subfolder).group(1)) < 4: - before_blender_4 = True - # Blender 4 has Python 3.11 which does not support 'PySide2' - # QUESTION could we always install PySide6? 
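For orientation, the script-argument hook above typically results in a launch command of this shape (paths are hypothetical), and the launched script can pick up its own arguments after the `--` separator:

# blender /work/shot010/lighting_v001.blend -P /scripts/setup.py -- --task lighting
# Blender executes each "-P" script on startup and ignores everything after "--",
# which the script can read back itself:
import sys

script_args = []
if "--" in sys.argv:
    script_args = sys.argv[sys.argv.index("--") + 1:]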
- qt_binding = "PySide2" if before_blender_4 else "PySide6" - # Use PySide6 6.6.3 because 6.7.0 had a bug - # - 'QTextEdit' can't be added to 'QBoxLayout' - qt_binding_version = None if before_blender_4 else "6.6.3" - - python_dir = os.path.join(versions_dir, version_subfolder, "python") - python_lib = os.path.join(python_dir, "lib") - python_version = "python" - - if platform != "windows": - for dir_entry in os.scandir(python_lib): - if dir_entry.is_dir() and dir_entry.name.startswith("python"): - python_lib = dir_entry.path - python_version = dir_entry.name - break - - # Change PYTHONPATH to contain blender's packages as first - python_paths = [ - python_lib, - os.path.join(python_lib, "site-packages"), - ] - python_path = self.launch_context.env.get("PYTHONPATH") or "" - for path in python_path.split(os.pathsep): - if path: - python_paths.append(path) - - self.launch_context.env["PYTHONPATH"] = os.pathsep.join(python_paths) - - # Get blender's python executable - python_bin = os.path.join(python_dir, "bin") - if platform == "windows": - python_executable = os.path.join(python_bin, "python.exe") - else: - python_executable = os.path.join(python_bin, python_version) - # Check for python with enabled 'pymalloc' - if not os.path.exists(python_executable): - python_executable += "m" - - if not os.path.exists(python_executable): - self.log.warning( - "Couldn't find python executable for blender. {}".format( - executable - ) - ) - return - - # Check if PySide2 is installed and skip if yes - if self.is_pyside_installed(python_executable, qt_binding): - self.log.debug("Blender has already installed PySide2.") - return - - # Install PySide2 in blender's python - if platform == "windows": - result = self.install_pyside_windows( - python_executable, - qt_binding, - qt_binding_version, - before_blender_4, - ) - else: - result = self.install_pyside( - python_executable, - qt_binding, - qt_binding_version, - ) - - if result: - self.log.info( - f"Successfully installed {qt_binding} module to blender." - ) - else: - self.log.warning( - f"Failed to install {qt_binding} module to blender." - ) - - def install_pyside_windows( - self, - python_executable, - qt_binding, - qt_binding_version, - before_blender_4, - ): - """Install PySide2 python module to blender's python. - - Installation requires administration rights that's why it is required - to use "pywin32" module which can execute command's and ask for - administration rights. - """ - try: - import win32con - import win32process - import win32event - import pywintypes - from win32comext.shell.shell import ShellExecuteEx - from win32comext.shell import shellcon - except Exception: - self.log.warning("Couldn't import \"pywin32\" modules") - return - - if qt_binding_version: - qt_binding = f"{qt_binding}=={qt_binding_version}" - - try: - # Parameters - # - use "-m pip" as module pip to install PySide2 and argument - # "--ignore-installed" is to force install module to blender's - # site-packages and make sure it is binary compatible - fake_exe = "fake.exe" - site_packages_prefix = os.path.dirname( - os.path.dirname(python_executable) - ) - args = [ - fake_exe, - "-m", - "pip", - "install", - "--ignore-installed", - qt_binding, - ] - if not before_blender_4: - # Define prefix for site package - # Python in blender 4.x is installing packages in AppData and - # not in blender's directory. 
- args.extend(["--prefix", site_packages_prefix]) - - parameters = ( - subprocess.list2cmdline(args) - .lstrip(fake_exe) - .lstrip(" ") - ) - - # Execute command and ask for administrator's rights - process_info = ShellExecuteEx( - nShow=win32con.SW_SHOWNORMAL, - fMask=shellcon.SEE_MASK_NOCLOSEPROCESS, - lpVerb="runas", - lpFile=python_executable, - lpParameters=parameters, - lpDirectory=os.path.dirname(python_executable) - ) - process_handle = process_info["hProcess"] - win32event.WaitForSingleObject(process_handle, win32event.INFINITE) - returncode = win32process.GetExitCodeProcess(process_handle) - return returncode == 0 - except pywintypes.error: - pass - - def install_pyside( - self, - python_executable, - qt_binding, - qt_binding_version, - ): - """Install Qt binding python module to blender's python.""" - if qt_binding_version: - qt_binding = f"{qt_binding}=={qt_binding_version}" - try: - # Parameters - # - use "-m pip" as module pip to install qt binding and argument - # "--ignore-installed" is to force install module to blender's - # site-packages and make sure it is binary compatible - # TODO find out if blender 4.x on linux/darwin does install - # qt binding to correct place. - args = [ - python_executable, - "-m", - "pip", - "install", - "--ignore-installed", - qt_binding, - ] - process = subprocess.Popen( - args, stdout=subprocess.PIPE, universal_newlines=True - ) - process.communicate() - return process.returncode == 0 - except PermissionError: - self.log.warning( - "Permission denied with command:" - "\"{}\".".format(" ".join(args)) - ) - except OSError as error: - self.log.warning(f"OS error has occurred: \"{error}\".") - except subprocess.SubprocessError: - pass - - def is_pyside_installed(self, python_executable, qt_binding): - """Check if PySide2 module is in blender's pip list. - - Check that PySide2 is installed directly in blender's site-packages. - It is possible that it is installed in user's site-packages but that - may be incompatible with blender's python. - """ - - qt_binding_low = qt_binding.lower() - # Get pip list from blender's python executable - args = [python_executable, "-m", "pip", "list"] - process = subprocess.Popen(args, stdout=subprocess.PIPE) - stdout, _ = process.communicate() - lines = stdout.decode().split(os.linesep) - # Second line contain dashes that define maximum length of module name. - # Second column of dashes define maximum length of module version. - package_dashes, *_ = lines[1].split(" ") - package_len = len(package_dashes) - - # Got through printed lines starting at line 3 - for idx in range(2, len(lines)): - line = lines[idx] - if not line: - continue - package_name = line[0:package_len].strip() - if package_name.lower() == qt_binding_low: - return True - return False diff --git a/server_addon/blender/client/ayon_blender/hooks/pre_windows_console.py b/server_addon/blender/client/ayon_blender/hooks/pre_windows_console.py deleted file mode 100644 index 47303a7af4..0000000000 --- a/server_addon/blender/client/ayon_blender/hooks/pre_windows_console.py +++ /dev/null @@ -1,29 +0,0 @@ -import subprocess -from ayon_applications import PreLaunchHook, LaunchTypes - - -class BlenderConsoleWindows(PreLaunchHook): - """Foundry applications have specific way how to launch them. - - Blender is executed "like" python process so it is required to pass - `CREATE_NEW_CONSOLE` flag on windows to trigger creation of new console. 
- At the same time the newly created console won't create it's own stdout - and stderr handlers so they should not be redirected to DEVNULL. - """ - - # Should be as last hook because must change launch arguments to string - order = 1000 - app_groups = {"blender"} - platforms = {"windows"} - launch_types = {LaunchTypes.local} - - def execute(self): - # Change `creationflags` to CREATE_NEW_CONSOLE - # - on Windows will blender create new window using it's console - # Set `stdout` and `stderr` to None so new created console does not - # have redirected output to DEVNULL in build - self.launch_context.kwargs.update({ - "creationflags": subprocess.CREATE_NEW_CONSOLE, - "stdout": None, - "stderr": None - }) diff --git a/server_addon/blender/client/ayon_blender/plugins/create/convert_legacy.py b/server_addon/blender/client/ayon_blender/plugins/create/convert_legacy.py deleted file mode 100644 index 095f3ab919..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/create/convert_legacy.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- -"""Converter for legacy Houdini products.""" -from ayon_core.pipeline.create.creator_plugins import ProductConvertorPlugin -from ayon_blender.api.lib import imprint - - -class BlenderLegacyConvertor(ProductConvertorPlugin): - """Find and convert any legacy products in the scene. - - This Converter will find all legacy products in the scene and will - transform them to the current system. Since the old products doesn't - retain any information about their original creators, the only mapping - we can do is based on their product types. - - Its limitation is that you can have multiple creators creating product - of the same product type and there is no way to handle it. This code - should nevertheless cover all creators that came with OpenPype. - - """ - identifier = "io.openpype.creators.blender.legacy" - product_type_to_id = { - "action": "io.openpype.creators.blender.action", - "camera": "io.openpype.creators.blender.camera", - "animation": "io.openpype.creators.blender.animation", - "blendScene": "io.openpype.creators.blender.blendscene", - "layout": "io.openpype.creators.blender.layout", - "model": "io.openpype.creators.blender.model", - "pointcache": "io.openpype.creators.blender.pointcache", - "render": "io.openpype.creators.blender.render", - "review": "io.openpype.creators.blender.review", - "rig": "io.openpype.creators.blender.rig", - } - - def __init__(self, *args, **kwargs): - super(BlenderLegacyConvertor, self).__init__(*args, **kwargs) - self.legacy_instances = {} - - def find_instances(self): - """Find legacy products in the scene. - - Legacy products are the ones that doesn't have `creator_identifier` - parameter on them. - - This is using cached entries done in - :py:meth:`~BlenderCreator.cache_instance_data()` - - """ - self.legacy_instances = self.collection_shared_data.get( - "blender_cached_legacy_instances") - if not self.legacy_instances: - return - self.add_convertor_item( - "Found {} incompatible product{}".format( - len(self.legacy_instances), - "s" if len(self.legacy_instances) > 1 else "" - ) - ) - - def convert(self): - """Convert all legacy products to current. - - It is enough to add `creator_identifier` and `instance_node`. 
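For context on what the legacy conversion amounts to for a single node (the node and its family are hypothetical; the identifier comes from the product_type_to_id mapping above, and convert(), which follows, performs exactly this per node):

# Before: the legacy instance only carries the old-style id and family, e.g.
#   instance_node[AVALON_PROPERTY] == {"id": AVALON_INSTANCE_ID, "family": "model", ...}
# convert() then imprints the matching creator identifier next to it:
imprint(instance_node, data={
    "creator_identifier": "io.openpype.creators.blender.model",
})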
- - """ - if not self.legacy_instances: - return - - for product_type, instance_nodes in self.legacy_instances.items(): - if product_type in self.product_type_to_id: - for instance_node in instance_nodes: - creator_identifier = self.product_type_to_id[product_type] - self.log.info( - "Converting {} to {}".format(instance_node.name, - creator_identifier) - ) - imprint(instance_node, data={ - "creator_identifier": creator_identifier - }) diff --git a/server_addon/blender/client/ayon_blender/plugins/create/create_action.py b/server_addon/blender/client/ayon_blender/plugins/create/create_action.py deleted file mode 100644 index 123a2e0df1..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/create/create_action.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Create an animation asset.""" - -import bpy - -from ayon_blender.api import lib, plugin - - -class CreateAction(plugin.BlenderCreator): - """Action output for character rigs.""" - - identifier = "io.openpype.creators.blender.action" - label = "Action" - product_type = "action" - icon = "male" - - def create( - self, product_name: str, instance_data: dict, pre_create_data: dict - ): - # Run parent create method - collection = super().create( - product_name, instance_data, pre_create_data - ) - - # Get instance name - name = plugin.prepare_scene_name( - instance_data["folderPath"], product_name - ) - - if pre_create_data.get("use_selection"): - for obj in lib.get_selection(): - if (obj.animation_data is not None - and obj.animation_data.action is not None): - - empty_obj = bpy.data.objects.new(name=name, - object_data=None) - empty_obj.animation_data_create() - empty_obj.animation_data.action = obj.animation_data.action - empty_obj.animation_data.action.name = name - collection.objects.link(empty_obj) - - return collection diff --git a/server_addon/blender/client/ayon_blender/plugins/create/create_animation.py b/server_addon/blender/client/ayon_blender/plugins/create/create_animation.py deleted file mode 100644 index cfb2c254ef..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/create/create_animation.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Create an animation asset.""" - -from ayon_blender.api import plugin, lib - - -class CreateAnimation(plugin.BlenderCreator): - """Animation output for character rigs.""" - - identifier = "io.openpype.creators.blender.animation" - label = "Animation" - product_type = "animation" - icon = "male" - - def create( - self, product_name: str, instance_data: dict, pre_create_data: dict - ): - # Run parent create method - collection = super().create( - product_name, instance_data, pre_create_data - ) - - if pre_create_data.get("use_selection"): - selected = lib.get_selection() - for obj in selected: - collection.objects.link(obj) - elif pre_create_data.get("asset_group"): - # Use for Load Blend automated creation of animation instances - # upon loading rig files - obj = pre_create_data.get("asset_group") - collection.objects.link(obj) - - return collection diff --git a/server_addon/blender/client/ayon_blender/plugins/create/create_blendScene.py b/server_addon/blender/client/ayon_blender/plugins/create/create_blendScene.py deleted file mode 100644 index 363a35883b..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/create/create_blendScene.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Create a Blender scene asset.""" - -import bpy - -from ayon_blender.api import plugin, lib - - -class CreateBlendScene(plugin.BlenderCreator): - """Generic group of assets.""" - - identifier = 
"io.openpype.creators.blender.blendscene" - label = "Blender Scene" - product_type = "blendScene" - icon = "cubes" - - maintain_selection = False - - def create( - self, product_name: str, instance_data: dict, pre_create_data: dict - ): - - instance_node = super().create(product_name, - instance_data, - pre_create_data) - - if pre_create_data.get("use_selection"): - selection = lib.get_selection(include_collections=True) - for data in selection: - if isinstance(data, bpy.types.Collection): - instance_node.children.link(data) - elif isinstance(data, bpy.types.Object): - instance_node.objects.link(data) - - return instance_node diff --git a/server_addon/blender/client/ayon_blender/plugins/create/create_camera.py b/server_addon/blender/client/ayon_blender/plugins/create/create_camera.py deleted file mode 100644 index 8cfe8f989b..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/create/create_camera.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Create a camera asset.""" - -import bpy - -from ayon_blender.api import plugin, lib -from ayon_blender.api.pipeline import AVALON_INSTANCES - - -class CreateCamera(plugin.BlenderCreator): - """Polygonal static geometry.""" - - identifier = "io.openpype.creators.blender.camera" - label = "Camera" - product_type = "camera" - icon = "video-camera" - - create_as_asset_group = True - - def create( - self, product_name: str, instance_data: dict, pre_create_data: dict - ): - - asset_group = super().create(product_name, - instance_data, - pre_create_data) - - bpy.context.view_layer.objects.active = asset_group - if pre_create_data.get("use_selection"): - for obj in lib.get_selection(): - obj.parent = asset_group - else: - plugin.deselect_all() - camera = bpy.data.cameras.new(product_name) - camera_obj = bpy.data.objects.new(product_name, camera) - - instances = bpy.data.collections.get(AVALON_INSTANCES) - instances.objects.link(camera_obj) - - bpy.context.view_layer.objects.active = asset_group - camera_obj.parent = asset_group - - return asset_group diff --git a/server_addon/blender/client/ayon_blender/plugins/create/create_layout.py b/server_addon/blender/client/ayon_blender/plugins/create/create_layout.py deleted file mode 100644 index 1e0f8effdd..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/create/create_layout.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Create a layout asset.""" - -import bpy - -from ayon_blender.api import plugin, lib - - -class CreateLayout(plugin.BlenderCreator): - """Layout output for character rigs.""" - - identifier = "io.openpype.creators.blender.layout" - label = "Layout" - product_type = "layout" - icon = "cubes" - - create_as_asset_group = True - - def create( - self, product_name: str, instance_data: dict, pre_create_data: dict - ): - - asset_group = super().create(product_name, - instance_data, - pre_create_data) - - # Add selected objects to instance - if pre_create_data.get("use_selection"): - bpy.context.view_layer.objects.active = asset_group - for obj in lib.get_selection(): - obj.parent = asset_group - - return asset_group diff --git a/server_addon/blender/client/ayon_blender/plugins/create/create_model.py b/server_addon/blender/client/ayon_blender/plugins/create/create_model.py deleted file mode 100644 index 7e8bf566ea..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/create/create_model.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Create a model asset.""" - -import bpy - -from ayon_blender.api import plugin, lib - - -class CreateModel(plugin.BlenderCreator): - """Polygonal static 
geometry.""" - - identifier = "io.openpype.creators.blender.model" - label = "Model" - product_type = "model" - icon = "cube" - - create_as_asset_group = True - - def create( - self, product_name: str, instance_data: dict, pre_create_data: dict - ): - asset_group = super().create(product_name, - instance_data, - pre_create_data) - - # Add selected objects to instance - if pre_create_data.get("use_selection"): - bpy.context.view_layer.objects.active = asset_group - for obj in lib.get_selection(): - obj.parent = asset_group - - return asset_group diff --git a/server_addon/blender/client/ayon_blender/plugins/create/create_pointcache.py b/server_addon/blender/client/ayon_blender/plugins/create/create_pointcache.py deleted file mode 100644 index 9730ddb89d..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/create/create_pointcache.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Create a pointcache asset.""" - -from ayon_blender.api import plugin, lib - - -class CreatePointcache(plugin.BlenderCreator): - """Polygonal static geometry.""" - - identifier = "io.openpype.creators.blender.pointcache" - label = "Point Cache" - product_type = "pointcache" - icon = "gears" - - def create( - self, product_name: str, instance_data: dict, pre_create_data: dict - ): - # Run parent create method - collection = super().create( - product_name, instance_data, pre_create_data - ) - - if pre_create_data.get("use_selection"): - objects = lib.get_selection() - for obj in objects: - collection.objects.link(obj) - if obj.type == 'EMPTY': - objects.extend(obj.children) - - return collection diff --git a/server_addon/blender/client/ayon_blender/plugins/create/create_render.py b/server_addon/blender/client/ayon_blender/plugins/create/create_render.py deleted file mode 100644 index 6bbedb957f..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/create/create_render.py +++ /dev/null @@ -1,45 +0,0 @@ -"""Create render.""" -import bpy - -from ayon_core.lib import version_up -from ayon_blender.api import plugin -from ayon_blender.api.render_lib import prepare_rendering -from ayon_blender.api.workio import save_file - - -class CreateRenderlayer(plugin.BlenderCreator): - """Single baked camera.""" - - identifier = "io.openpype.creators.blender.render" - label = "Render" - product_type = "render" - icon = "eye" - - def create( - self, product_name: str, instance_data: dict, pre_create_data: dict - ): - try: - # Run parent create method - collection = super().create( - product_name, instance_data, pre_create_data - ) - - prepare_rendering(collection) - except Exception: - # Remove the instance if there was an error - bpy.data.collections.remove(collection) - raise - - # TODO: this is undesiderable, but it's the only way to be sure that - # the file is saved before the render starts. - # Blender, by design, doesn't set the file as dirty if modifications - # happen by script. So, when creating the instance and setting the - # render settings, the file is not marked as dirty. This means that - # there is the risk of sending to deadline a file without the right - # settings. Even the validator to check that the file is saved will - # detect the file as saved, even if it isn't. The only solution for - # now it is to force the file to be saved. 
- filepath = version_up(bpy.data.filepath) - save_file(filepath, copy=False) - - return collection diff --git a/server_addon/blender/client/ayon_blender/plugins/create/create_review.py b/server_addon/blender/client/ayon_blender/plugins/create/create_review.py deleted file mode 100644 index dbef9e371f..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/create/create_review.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Create review.""" - -from ayon_blender.api import plugin, lib - - -class CreateReview(plugin.BlenderCreator): - """Single baked camera.""" - - identifier = "io.openpype.creators.blender.review" - label = "Review" - product_type = "review" - icon = "video-camera" - - def create( - self, product_name: str, instance_data: dict, pre_create_data: dict - ): - # Run parent create method - collection = super().create( - product_name, instance_data, pre_create_data - ) - - if pre_create_data.get("use_selection"): - selected = lib.get_selection() - for obj in selected: - collection.objects.link(obj) - - return collection diff --git a/server_addon/blender/client/ayon_blender/plugins/create/create_rig.py b/server_addon/blender/client/ayon_blender/plugins/create/create_rig.py deleted file mode 100644 index aad24bda69..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/create/create_rig.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Create a rig asset.""" - -import bpy - -from ayon_blender.api import plugin, lib - - -class CreateRig(plugin.BlenderCreator): - """Artist-friendly rig with controls to direct motion.""" - - identifier = "io.openpype.creators.blender.rig" - label = "Rig" - product_type = "rig" - icon = "wheelchair" - - create_as_asset_group = True - - def create( - self, product_name: str, instance_data: dict, pre_create_data: dict - ): - asset_group = super().create(product_name, - instance_data, - pre_create_data) - - # Add selected objects to instance - if pre_create_data.get("use_selection"): - bpy.context.view_layer.objects.active = asset_group - for obj in lib.get_selection(): - obj.parent = asset_group - - return asset_group diff --git a/server_addon/blender/client/ayon_blender/plugins/create/create_usd.py b/server_addon/blender/client/ayon_blender/plugins/create/create_usd.py deleted file mode 100644 index d7770b15f7..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/create/create_usd.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Create a USD Export.""" - -from ayon_blender.api import plugin, lib - - -class CreateUSD(plugin.BlenderCreator): - """Create USD Export""" - - identifier = "io.openpype.creators.blender.usd" - name = "usdMain" - label = "USD" - product_type = "usd" - icon = "gears" - - def create( - self, product_name: str, instance_data: dict, pre_create_data: dict - ): - # Run parent create method - collection = super().create( - product_name, instance_data, pre_create_data - ) - - if pre_create_data.get("use_selection"): - objects = lib.get_selection() - for obj in objects: - collection.objects.link(obj) - if obj.type == 'EMPTY': - objects.extend(obj.children) - - return collection diff --git a/server_addon/blender/client/ayon_blender/plugins/create/create_workfile.py b/server_addon/blender/client/ayon_blender/plugins/create/create_workfile.py deleted file mode 100644 index 03cfc322a9..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/create/create_workfile.py +++ /dev/null @@ -1,132 +0,0 @@ -import bpy -import ayon_api - -from ayon_core.pipeline import CreatedInstance, AutoCreator -from ayon_blender.api.plugin import 
BlenderCreator -from ayon_blender.api.pipeline import ( - AVALON_PROPERTY, - AVALON_CONTAINERS -) - - -class CreateWorkfile(BlenderCreator, AutoCreator): - """Workfile auto-creator. - - The workfile instance stores its data on the `AVALON_CONTAINERS` collection - as custom attributes, because unlike other instances it doesn't have an - instance node of its own. - - """ - identifier = "io.openpype.creators.blender.workfile" - label = "Workfile" - product_type = "workfile" - icon = "fa5.file" - - def create(self): - """Create workfile instances.""" - workfile_instance = next( - ( - instance for instance in self.create_context.instances - if instance.creator_identifier == self.identifier - ), - None, - ) - - project_name = self.project_name - folder_path = self.create_context.get_current_folder_path() - task_name = self.create_context.get_current_task_name() - host_name = self.create_context.host_name - - existing_folder_path = None - if workfile_instance is not None: - existing_folder_path = workfile_instance.get("folderPath") - - if not workfile_instance: - folder_entity = ayon_api.get_folder_by_path( - project_name, folder_path - ) - task_entity = ayon_api.get_task_by_name( - project_name, folder_entity["id"], task_name - ) - product_name = self.get_product_name( - project_name, - folder_entity, - task_entity, - task_name, - host_name, - ) - data = { - "folderPath": folder_path, - "task": task_name, - "variant": task_name, - } - data.update( - self.get_dynamic_data( - project_name, - folder_entity, - task_entity, - task_name, - host_name, - workfile_instance, - ) - ) - self.log.info("Auto-creating workfile instance...") - workfile_instance = CreatedInstance( - self.product_type, product_name, data, self - ) - self._add_instance_to_context(workfile_instance) - - elif ( - existing_folder_path != folder_path - or workfile_instance["task"] != task_name - ): - # Update instance context if it's different - folder_entity = ayon_api.get_folder_by_path( - project_name, folder_path - ) - task_entity = ayon_api.get_task_by_name( - project_name, folder_entity["id"], task_name - ) - product_name = self.get_product_name( - project_name, - folder_entity, - task_entity, - self.default_variant, - host_name, - ) - - workfile_instance["folderPath"] = folder_path - workfile_instance["task"] = task_name - workfile_instance["productName"] = product_name - - instance_node = bpy.data.collections.get(AVALON_CONTAINERS) - if not instance_node: - instance_node = bpy.data.collections.new(name=AVALON_CONTAINERS) - workfile_instance.transient_data["instance_node"] = instance_node - - def collect_instances(self): - - instance_node = bpy.data.collections.get(AVALON_CONTAINERS) - if not instance_node: - return - - property = instance_node.get(AVALON_PROPERTY) - if not property: - return - - # Create instance object from existing data - instance = CreatedInstance.from_existing( - instance_data=property.to_dict(), - creator=self - ) - instance.transient_data["instance_node"] = instance_node - - # Add instance to create context - self._add_instance_to_context(instance) - - def remove_instances(self, instances): - for instance in instances: - node = instance.transient_data["instance_node"] - del node[AVALON_PROPERTY] - - self._remove_instance_from_context(instance) diff --git a/server_addon/blender/client/ayon_blender/plugins/load/import_workfile.py b/server_addon/blender/client/ayon_blender/plugins/load/import_workfile.py deleted file mode 100644 index 16cba6913d..0000000000 --- 
a/server_addon/blender/client/ayon_blender/plugins/load/import_workfile.py +++ /dev/null @@ -1,84 +0,0 @@ -import bpy - -from ayon_blender.api import plugin - - -def append_workfile(context, fname, do_import): - folder_name = context["folder"]["name"] - product_name = context["product"]["name"] - - group_name = plugin.prepare_scene_name(folder_name, product_name) - - # We need to preserve the original names of the scenes, otherwise, - # if there are duplicate names in the current workfile, the imported - # scenes will be renamed by Blender to avoid conflicts. - original_scene_names = [] - - with bpy.data.libraries.load(fname) as (data_from, data_to): - for attr in dir(data_to): - if attr == "scenes": - for scene in data_from.scenes: - original_scene_names.append(scene) - setattr(data_to, attr, getattr(data_from, attr)) - - current_scene = bpy.context.scene - - for scene, s_name in zip(data_to.scenes, original_scene_names): - scene.name = f"{group_name}_{s_name}" - if do_import: - collection = bpy.data.collections.new(f"{group_name}_{s_name}") - for obj in scene.objects: - collection.objects.link(obj) - current_scene.collection.children.link(collection) - for coll in scene.collection.children: - collection.children.link(coll) - - -class AppendBlendLoader(plugin.BlenderLoader): - """Append workfile in Blender (unmanaged) - - Warning: - The loaded content will be unmanaged and is *not* visible in the - scene inventory. It's purely intended to merge content into your scene - so you could also use it as a new base. - """ - - representations = {"blend"} - product_types = {"workfile"} - - label = "Append Workfile" - order = 9 - icon = "arrow-circle-down" - color = "#775555" - - def load(self, context, name=None, namespace=None, data=None): - path = self.filepath_from_context(context) - append_workfile(context, path, False) - - # We do not containerize imported content, it remains unmanaged - return - - -class ImportBlendLoader(plugin.BlenderLoader): - """Import workfile in the current Blender scene (unmanaged) - - Warning: - The loaded content will be unmanaged and is *not* visible in the - scene inventory. It's purely intended to merge content into your scene - so you could also use it as a new base. - """ - - representations = {"blend"} - product_types = {"workfile"} - - label = "Import Workfile" - order = 9 - icon = "arrow-circle-down" - color = "#775555" - - def load(self, context, name=None, namespace=None, data=None): - path = self.filepath_from_context(context) - append_workfile(context, path, True) - - # We do not containerize imported content, it remains unmanaged - return diff --git a/server_addon/blender/client/ayon_blender/plugins/load/load_action.py b/server_addon/blender/client/ayon_blender/plugins/load/load_action.py deleted file mode 100644 index ddfaa94044..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/load/load_action.py +++ /dev/null @@ -1,293 +0,0 @@ -"""Load an action in Blender.""" - -import logging -from pathlib import Path -from pprint import pformat -from typing import Dict, List, Optional - -import bpy -from ayon_core.pipeline import get_representation_path -from ayon_blender.api import plugin -from ayon_blender.api.pipeline import ( - containerise_existing, - AVALON_PROPERTY, -) - -logger = logging.getLogger("ayon").getChild("blender").getChild("load_action") - - -class BlendActionLoader(plugin.BlenderLoader): - """Load action from a .blend file. - - Warning: - Loading the same asset more then once is not properly supported at the - moment. 
- """ - - product_types = {"action"} - representations = {"blend"} - - label = "Link Action" - icon = "code-fork" - color = "orange" - - def process_asset( - self, context: dict, name: str, namespace: Optional[str] = None, - options: Optional[Dict] = None - ) -> Optional[List]: - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - - libpath = self.filepath_from_context(context) - folder_name = context["folder"]["name"] - product_name = context["product"]["name"] - lib_container = plugin.prepare_scene_name(folder_name, product_name) - container_name = plugin.prepare_scene_name( - folder_name, product_name, namespace - ) - - container = bpy.data.collections.new(lib_container) - container.name = container_name - containerise_existing( - container, - name, - namespace, - context, - self.__class__.__name__, - ) - - container_metadata = container.get(AVALON_PROPERTY) - - container_metadata["libpath"] = libpath - container_metadata["lib_container"] = lib_container - - relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load( - libpath, link=True, relative=relative - ) as (_, data_to): - data_to.collections = [lib_container] - - collection = bpy.context.scene.collection - - collection.children.link(bpy.data.collections[lib_container]) - - animation_container = collection.children[lib_container].make_local() - - objects_list = [] - - # Link meshes first, then armatures. - # The armature is unparented for all the non-local meshes, - # when it is made local. - for obj in animation_container.objects: - - obj = obj.make_local() - - anim_data = obj.animation_data - - if anim_data is not None and anim_data.action is not None: - - anim_data.action.make_local() - - if not obj.get(AVALON_PROPERTY): - - obj[AVALON_PROPERTY] = dict() - - avalon_info = obj[AVALON_PROPERTY] - avalon_info.update({"container_name": container_name}) - - objects_list.append(obj) - - animation_container.pop(AVALON_PROPERTY) - - # Save the list of objects in the metadata container - container_metadata["objects"] = objects_list - - bpy.ops.object.select_all(action='DESELECT') - - nodes = list(container.objects) - nodes.append(container) - self[:] = nodes - return nodes - - def update(self, container: Dict, context: Dict): - """Update the loaded asset. - - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. - - Warning: - No nested collections are supported at the moment! - """ - repre_entity = context["representation"] - collection = bpy.data.collections.get( - container["objectName"] - ) - - libpath = Path(get_representation_path(repre_entity)) - extension = libpath.suffix.lower() - - logger.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(repre_entity, indent=2), - ) - - assert collection, ( - f"The asset is not loaded: {container['objectName']}" - ) - assert not (collection.children), ( - "Nested collections are not supported." 
- ) - assert libpath, ( - "No existing library file found for {container['objectName']}" - ) - assert libpath.is_file(), ( - f"The file doesn't exist: {libpath}" - ) - assert extension in plugin.VALID_EXTENSIONS, ( - f"Unsupported file: {libpath}" - ) - - collection_metadata = collection.get(AVALON_PROPERTY) - - collection_libpath = collection_metadata["libpath"] - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) - ) - normalized_libpath = ( - str(Path(bpy.path.abspath(str(libpath))).resolve()) - ) - logger.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, - normalized_libpath, - ) - if normalized_collection_libpath == normalized_libpath: - logger.info("Library already loaded, not updating...") - return - - strips = [] - - for obj in list(collection_metadata["objects"]): - # Get all the strips that use the action - arm_objs = [ - arm for arm in bpy.data.objects if arm.type == 'ARMATURE'] - - for armature_obj in arm_objs: - if armature_obj.animation_data is not None: - for track in armature_obj.animation_data.nla_tracks: - for strip in track.strips: - if strip.action == obj.animation_data.action: - strips.append(strip) - - bpy.data.actions.remove(obj.animation_data.action) - bpy.data.objects.remove(obj) - - lib_container = collection_metadata["lib_container"] - - bpy.data.collections.remove(bpy.data.collections[lib_container]) - - relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load( - str(libpath), link=True, relative=relative - ) as (_, data_to): - data_to.collections = [lib_container] - - scene = bpy.context.scene - - scene.collection.children.link(bpy.data.collections[lib_container]) - - anim_container = scene.collection.children[lib_container].make_local() - - objects_list = [] - - # Link meshes first, then armatures. - # The armature is unparented for all the non-local meshes, - # when it is made local. - for obj in anim_container.objects: - - obj = obj.make_local() - - anim_data = obj.animation_data - - if anim_data is not None and anim_data.action is not None: - - anim_data.action.make_local() - - for strip in strips: - - strip.action = anim_data.action - strip.action_frame_end = anim_data.action.frame_range[1] - - if not obj.get(AVALON_PROPERTY): - - obj[AVALON_PROPERTY] = dict() - - avalon_info = obj[AVALON_PROPERTY] - avalon_info.update({"container_name": collection.name}) - - objects_list.append(obj) - - anim_container.pop(AVALON_PROPERTY) - - # Save the list of objects in the metadata container - collection_metadata["objects"] = objects_list - collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = repre_entity["id"] - - bpy.ops.object.select_all(action='DESELECT') - - def remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. - - Arguments: - container (openpype:container-1.0): Container to remove, - from `host.ls()`. - - Returns: - bool: Whether the container was deleted. - - Warning: - No nested collections are supported at the moment! - """ - - collection = bpy.data.collections.get( - container["objectName"] - ) - if not collection: - return False - assert not (collection.children), ( - "Nested collections are not supported." 
- ) - - collection_metadata = collection.get(AVALON_PROPERTY) - objects = collection_metadata["objects"] - lib_container = collection_metadata["lib_container"] - - for obj in list(objects): - # Get all the strips that use the action - arm_objs = [ - arm for arm in bpy.data.objects if arm.type == 'ARMATURE'] - - for armature_obj in arm_objs: - if armature_obj.animation_data is not None: - for track in armature_obj.animation_data.nla_tracks: - for strip in track.strips: - if strip.action == obj.animation_data.action: - track.strips.remove(strip) - - bpy.data.actions.remove(obj.animation_data.action) - bpy.data.objects.remove(obj) - - bpy.data.collections.remove(bpy.data.collections[lib_container]) - bpy.data.collections.remove(collection) - - return True diff --git a/server_addon/blender/client/ayon_blender/plugins/load/load_animation.py b/server_addon/blender/client/ayon_blender/plugins/load/load_animation.py deleted file mode 100644 index 241b76b600..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/load/load_animation.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Load an animation in Blender.""" - -from typing import Dict, List, Optional - -import bpy - -from ayon_blender.api import plugin -from ayon_blender.api.pipeline import AVALON_PROPERTY - - -class BlendAnimationLoader(plugin.BlenderLoader): - """Load animations from a .blend file. - - Warning: - Loading the same asset more then once is not properly supported at the - moment. - """ - - product_types = {"animation"} - representations = {"blend"} - - label = "Link Animation" - icon = "code-fork" - color = "orange" - - def process_asset( - self, context: dict, name: str, namespace: Optional[str] = None, - options: Optional[Dict] = None - ) -> Optional[List]: - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - libpath = self.filepath_from_context(context) - - with bpy.data.libraries.load( - libpath, link=True, relative=False - ) as (data_from, data_to): - data_to.objects = data_from.objects - data_to.actions = data_from.actions - - container = data_to.objects[0] - - assert container, "No asset group found" - - target_namespace = container.get(AVALON_PROPERTY).get('namespace') - - action = data_to.actions[0].make_local().copy() - - for obj in bpy.data.objects: - if obj.get(AVALON_PROPERTY) and obj.get(AVALON_PROPERTY).get( - 'namespace') == target_namespace: - if obj.children[0]: - if not obj.children[0].animation_data: - obj.children[0].animation_data_create() - obj.children[0].animation_data.action = action - break - - bpy.data.objects.remove(container) - - filename = bpy.path.basename(libpath) - # Blender has a limit of 63 characters for any data name. - # If the filename is longer, it will be truncated. 
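
[Editor's sketch] The loader performs the 63-character truncation inline just below; the same pattern also recurs in `load_blend.py` and `load_blendscene.py`. A hedged helper showing the idea in one place (the name `remove_loaded_library` is illustrative, not part of the addon):

```python
import bpy

def remove_loaded_library(libpath: str) -> None:
    # Blender caps datablock names at 63 characters, so the lookup key
    # must be truncated the same way Blender truncates the library name.
    name = bpy.path.basename(libpath)[:63]
    library = bpy.data.libraries.get(name)
    if library is not None:  # it may already have been purged
        bpy.data.libraries.remove(library)
```
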
- if len(filename) > 63: - filename = filename[:63] - library = bpy.data.libraries.get(filename) - bpy.data.libraries.remove(library) diff --git a/server_addon/blender/client/ayon_blender/plugins/load/load_audio.py b/server_addon/blender/client/ayon_blender/plugins/load/load_audio.py deleted file mode 100644 index b8682e7c13..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/load/load_audio.py +++ /dev/null @@ -1,227 +0,0 @@ -"""Load audio in Blender.""" - -from pathlib import Path -from pprint import pformat -from typing import Dict, List, Optional - -import bpy - -from ayon_core.pipeline import ( - get_representation_path, - AVALON_CONTAINER_ID, -) -from ayon_blender.api import plugin -from ayon_blender.api.pipeline import ( - AVALON_CONTAINERS, - AVALON_PROPERTY, -) - - -class AudioLoader(plugin.BlenderLoader): - """Load audio in Blender.""" - - product_types = {"audio"} - representations = {"wav"} - - label = "Load Audio" - icon = "volume-up" - color = "orange" - - def process_asset( - self, context: dict, name: str, namespace: Optional[str] = None, - options: Optional[Dict] = None - ) -> Optional[List]: - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - libpath = self.filepath_from_context(context) - folder_name = context["folder"]["name"] - product_name = context["product"]["name"] - - asset_name = plugin.prepare_scene_name(folder_name, product_name) - unique_number = plugin.get_unique_number(folder_name, product_name) - group_name = plugin.prepare_scene_name( - folder_name, product_name, unique_number - ) - namespace = namespace or f"{folder_name}_{unique_number}" - - avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) - if not avalon_container: - avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) - bpy.context.scene.collection.children.link(avalon_container) - - asset_group = bpy.data.objects.new(group_name, object_data=None) - avalon_container.objects.link(asset_group) - - # Blender needs the Sequence Editor in the current window, to be able - # to load the audio. We take one of the areas in the window, save its - # type, and switch to the Sequence Editor. After loading the audio, - # we switch back to the previous area. - window_manager = bpy.context.window_manager - old_type = window_manager.windows[-1].screen.areas[0].type - window_manager.windows[-1].screen.areas[0].type = "SEQUENCE_EDITOR" - - # We override the context to load the audio in the sequence editor. - oc = bpy.context.copy() - oc["area"] = window_manager.windows[-1].screen.areas[0] - - with bpy.context.temp_override(**oc): - bpy.ops.sequencer.sound_strip_add(filepath=libpath, frame_start=1) - - window_manager.windows[-1].screen.areas[0].type = old_type - - p = Path(libpath) - audio = p.name - - asset_group[AVALON_PROPERTY] = { - "schema": "openpype:container-2.0", - "id": AVALON_CONTAINER_ID, - "name": name, - "namespace": namespace or '', - "loader": str(self.__class__.__name__), - "representation": context["representation"]["id"], - "libpath": libpath, - "asset_name": asset_name, - "parent": context["representation"]["versionId"], - "productType": context["product"]["productType"], - "objectName": group_name, - "audio": audio - } - - objects = [] - self[:] = objects - return [objects] - - def exec_update(self, container: Dict, context: Dict): - """Update an audio strip in the sequence editor. 
- - Arguments: - container (openpype:container-1.0): Container to update, - from `host.ls()`. - representation (openpype:representation-1.0): Representation to - update, from `host.ls()`. - """ - repre_entity = context["representation"] - object_name = container["objectName"] - asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(repre_entity)) - - self.log.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(repre_entity, indent=2), - ) - - assert asset_group, ( - f"The asset is not loaded: {container['objectName']}" - ) - assert libpath, ( - "No existing library file found for {container['objectName']}" - ) - assert libpath.is_file(), ( - f"The file doesn't exist: {libpath}" - ) - - metadata = asset_group.get(AVALON_PROPERTY) - group_libpath = metadata["libpath"] - - normalized_group_libpath = ( - str(Path(bpy.path.abspath(group_libpath)).resolve()) - ) - normalized_libpath = ( - str(Path(bpy.path.abspath(str(libpath))).resolve()) - ) - self.log.debug( - "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_group_libpath, - normalized_libpath, - ) - if normalized_group_libpath == normalized_libpath: - self.log.info("Library already loaded, not updating...") - return - - old_audio = container["audio"] - p = Path(libpath) - new_audio = p.name - - # Blender needs the Sequence Editor in the current window, to be able - # to update the audio. We take one of the areas in the window, save its - # type, and switch to the Sequence Editor. After updating the audio, - # we switch back to the previous area. - window_manager = bpy.context.window_manager - old_type = window_manager.windows[-1].screen.areas[0].type - window_manager.windows[-1].screen.areas[0].type = "SEQUENCE_EDITOR" - - # We override the context to load the audio in the sequence editor. - oc = bpy.context.copy() - oc["area"] = window_manager.windows[-1].screen.areas[0] - - with bpy.context.temp_override(**oc): - # We deselect all sequencer strips, and then select the one we - # need to remove. - bpy.ops.sequencer.select_all(action='DESELECT') - scene = bpy.context.scene - scene.sequence_editor.sequences_all[old_audio].select = True - - bpy.ops.sequencer.delete() - bpy.data.sounds.remove(bpy.data.sounds[old_audio]) - - bpy.ops.sequencer.sound_strip_add( - filepath=str(libpath), frame_start=1) - - window_manager.windows[-1].screen.areas[0].type = old_type - - metadata["libpath"] = str(libpath) - metadata["representation"] = repre_entity["id"] - metadata["parent"] = repre_entity["versionId"] - metadata["audio"] = new_audio - - def exec_remove(self, container: Dict) -> bool: - """Remove an audio strip from the sequence editor and the container. - - Arguments: - container (openpype:container-1.0): Container to remove, - from `host.ls()`. - - Returns: - bool: Whether the container was deleted. - """ - object_name = container["objectName"] - asset_group = bpy.data.objects.get(object_name) - - if not asset_group: - return False - - audio = container["audio"] - - # Blender needs the Sequence Editor in the current window, to be able - # to remove the audio. We take one of the areas in the window, save its - # type, and switch to the Sequence Editor. After removing the audio, - # we switch back to the previous area. 
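
[Editor's sketch] The "switch an area to the Sequence Editor, override the context, restore the area" dance appears three times in this loader (add, update, remove). A condensed sketch of that pattern as a context manager, assuming Blender 3.2+ for `temp_override` (the helper name is illustrative, not part of the addon):

```python
import bpy
from contextlib import contextmanager

@contextmanager
def sequencer_override():
    # Remember the area's original type, switch it to the Sequence Editor,
    # hand back a context override, and always restore the type afterwards.
    area = bpy.context.window_manager.windows[-1].screen.areas[0]
    old_type = area.type
    area.type = "SEQUENCE_EDITOR"
    try:
        override = bpy.context.copy()
        override["area"] = area
        yield override
    finally:
        area.type = old_type

# usage:
# with sequencer_override() as override, bpy.context.temp_override(**override):
#     bpy.ops.sequencer.select_all(action='DESELECT')
```
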
- window_manager = bpy.context.window_manager - old_type = window_manager.windows[-1].screen.areas[0].type - window_manager.windows[-1].screen.areas[0].type = "SEQUENCE_EDITOR" - - # We override the context to load the audio in the sequence editor. - oc = bpy.context.copy() - oc["area"] = window_manager.windows[-1].screen.areas[0] - - with bpy.context.temp_override(**oc): - # We deselect all sequencer strips, and then select the one we - # need to remove. - bpy.ops.sequencer.select_all(action='DESELECT') - scene = bpy.context.scene - scene.sequence_editor.sequences_all[audio].select = True - bpy.ops.sequencer.delete() - - window_manager.windows[-1].screen.areas[0].type = old_type - - bpy.data.sounds.remove(bpy.data.sounds[audio]) - - bpy.data.objects.remove(asset_group) - - return True diff --git a/server_addon/blender/client/ayon_blender/plugins/load/load_blend.py b/server_addon/blender/client/ayon_blender/plugins/load/load_blend.py deleted file mode 100644 index c9f3ec0c71..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/load/load_blend.py +++ /dev/null @@ -1,286 +0,0 @@ -from typing import Dict, List, Optional -from pathlib import Path - -import bpy - -from ayon_core.pipeline import ( - get_representation_path, - AVALON_CONTAINER_ID, - registered_host -) -from ayon_core.pipeline.create import CreateContext -from ayon_blender.api import plugin -from ayon_blender.api.lib import imprint -from ayon_blender.api.pipeline import ( - AVALON_CONTAINERS, - AVALON_PROPERTY, -) - - -class BlendLoader(plugin.BlenderLoader): - """Load assets from a .blend file.""" - - product_types = {"model", "rig", "layout", "camera"} - representations = {"blend"} - - label = "Append Blend" - icon = "code-fork" - color = "orange" - - @staticmethod - def _get_asset_container(objects): - empties = [obj for obj in objects if obj.type == 'EMPTY'] - - for empty in empties: - if empty.get(AVALON_PROPERTY) and empty.parent is None: - return empty - - return None - - @staticmethod - def get_all_container_parents(asset_group): - parent_containers = [] - parent = asset_group.parent - while parent: - if parent.get(AVALON_PROPERTY): - parent_containers.append(parent) - parent = parent.parent - - return parent_containers - - def _post_process_layout(self, container, asset, representation): - rigs = [ - obj for obj in container.children_recursive - if ( - obj.type == 'EMPTY' and - obj.get(AVALON_PROPERTY) and - obj.get(AVALON_PROPERTY).get('family') == 'rig' - ) - ] - if not rigs: - return - - # Create animation instances for each rig - creator_identifier = "io.openpype.creators.blender.animation" - host = registered_host() - create_context = CreateContext(host) - - for rig in rigs: - create_context.create( - creator_identifier=creator_identifier, - variant=rig.name.split(':')[-1], - pre_create_data={ - "use_selection": False, - "asset_group": rig - } - ) - - def _process_data(self, libpath, group_name): - # Append all the data from the .blend file - with bpy.data.libraries.load( - libpath, link=False, relative=False - ) as (data_from, data_to): - for attr in dir(data_to): - setattr(data_to, attr, getattr(data_from, attr)) - - members = [] - - # Rename the object to add the asset name - for attr in dir(data_to): - for data in getattr(data_to, attr): - data.name = f"{group_name}:{data.name}" - members.append(data) - - container = self._get_asset_container(data_to.objects) - assert container, "No asset group found" - - container.name = group_name - container.empty_display_type = 'SINGLE_ARROW' - - # Link the 
collection to the scene - bpy.context.scene.collection.objects.link(container) - - # Link all the container children to the collection - for obj in container.children_recursive: - bpy.context.scene.collection.objects.link(obj) - - # Remove the library from the blend file - filepath = bpy.path.basename(libpath) - # Blender has a limit of 63 characters for any data name. - # If the filepath is longer, it will be truncated. - if len(filepath) > 63: - filepath = filepath[:63] - library = bpy.data.libraries.get(filepath) - bpy.data.libraries.remove(library) - - return container, members - - def process_asset( - self, context: dict, name: str, namespace: Optional[str] = None, - options: Optional[Dict] = None - ) -> Optional[List]: - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - libpath = self.filepath_from_context(context) - folder_name = context["folder"]["name"] - product_name = context["product"]["name"] - - try: - product_type = context["product"]["productType"] - except ValueError: - product_type = "model" - - representation = context["representation"]["id"] - - asset_name = plugin.prepare_scene_name(folder_name, product_name) - unique_number = plugin.get_unique_number(folder_name, product_name) - group_name = plugin.prepare_scene_name( - folder_name, product_name, unique_number - ) - namespace = namespace or f"{folder_name}_{unique_number}" - - avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) - if not avalon_container: - avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) - bpy.context.scene.collection.children.link(avalon_container) - - container, members = self._process_data(libpath, group_name) - - if product_type == "layout": - self._post_process_layout(container, folder_name, representation) - - avalon_container.objects.link(container) - - data = { - "schema": "openpype:container-2.0", - "id": AVALON_CONTAINER_ID, - "name": name, - "namespace": namespace or '', - "loader": str(self.__class__.__name__), - "representation": context["representation"]["id"], - "libpath": libpath, - "asset_name": asset_name, - "parent": context["representation"]["versionId"], - "productType": context["product"]["productType"], - "objectName": group_name, - "members": members, - } - - container[AVALON_PROPERTY] = data - - objects = [ - obj for obj in bpy.data.objects - if obj.name.startswith(f"{group_name}:") - ] - - self[:] = objects - return objects - - def exec_update(self, container: Dict, context: Dict): - """ - Update the loaded asset. - """ - repre_entity = context["representation"] - group_name = container["objectName"] - asset_group = bpy.data.objects.get(group_name) - libpath = Path(get_representation_path(repre_entity)).as_posix() - - assert asset_group, ( - f"The asset is not loaded: {container['objectName']}" - ) - - transform = asset_group.matrix_basis.copy() - old_data = dict(asset_group.get(AVALON_PROPERTY)) - old_members = old_data.get("members", []) - parent = asset_group.parent - - actions = {} - objects_with_anim = [ - obj for obj in asset_group.children_recursive - if obj.animation_data] - for obj in objects_with_anim: - # Check if the object has an action and, if so, add it to a dict - # so we can restore it later. Save and restore the action only - # if it wasn't originally loaded from the current asset. 
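
[Editor's sketch] The update path below snapshots actions that were assigned from outside the loaded asset and reassigns them after the re-append. A hedged restatement of that save/restore pair as two helpers (names are illustrative; the loader keeps this logic inline):

```python
import bpy

def snapshot_external_actions(asset_group, old_members):
    # Remember only actions that did not come from the asset being updated.
    actions = {}
    for obj in asset_group.children_recursive:
        anim = obj.animation_data
        if anim and anim.action and anim.action not in old_members:
            actions[obj.name] = anim.action
    return actions

def restore_external_actions(asset_group, actions):
    # Reassign the remembered actions once the asset has been re-appended.
    for obj in asset_group.children_recursive:
        action = actions.get(obj.name)
        if action is None:
            continue
        if not obj.animation_data:
            obj.animation_data_create()
        obj.animation_data.action = action
```
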
- if obj.animation_data.action not in old_members: - actions[obj.name] = obj.animation_data.action - - self.exec_remove(container) - - asset_group, members = self._process_data(libpath, group_name) - - avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) - avalon_container.objects.link(asset_group) - - asset_group.matrix_basis = transform - asset_group.parent = parent - - # Restore the actions - for obj in asset_group.children_recursive: - if obj.name in actions: - if not obj.animation_data: - obj.animation_data_create() - obj.animation_data.action = actions[obj.name] - - # Restore the old data, but reset members, as they don't exist anymore - # This avoids a crash, because the memory addresses of those members - # are not valid anymore - old_data["members"] = [] - asset_group[AVALON_PROPERTY] = old_data - - new_data = { - "libpath": libpath, - "representation": repre_entity["id"], - "parent": repre_entity["versionId"], - "members": members, - } - - imprint(asset_group, new_data) - - # We need to update all the parent container members - parent_containers = self.get_all_container_parents(asset_group) - - for parent_container in parent_containers: - parent_members = parent_container[AVALON_PROPERTY]["members"] - parent_container[AVALON_PROPERTY]["members"] = ( - parent_members + members) - - def exec_remove(self, container: Dict) -> bool: - """ - Remove an existing container from a Blender scene. - """ - group_name = container["objectName"] - asset_group = bpy.data.objects.get(group_name) - - attrs = [ - attr for attr in dir(bpy.data) - if isinstance( - getattr(bpy.data, attr), - bpy.types.bpy_prop_collection - ) - ] - - members = asset_group.get(AVALON_PROPERTY).get("members", []) - - # We need to update all the parent container members - parent_containers = self.get_all_container_parents(asset_group) - - for parent in parent_containers: - parent.get(AVALON_PROPERTY)["members"] = list(filter( - lambda i: i not in members, - parent.get(AVALON_PROPERTY).get("members", []))) - - for attr in attrs: - for data in getattr(bpy.data, attr): - if data in members: - # Skip the asset group - if data == asset_group: - continue - getattr(bpy.data, attr).remove(data) - - bpy.data.objects.remove(asset_group) diff --git a/server_addon/blender/client/ayon_blender/plugins/load/load_blendscene.py b/server_addon/blender/client/ayon_blender/plugins/load/load_blendscene.py deleted file mode 100644 index 590ab0079e..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/load/load_blendscene.py +++ /dev/null @@ -1,235 +0,0 @@ -from typing import Dict, List, Optional -from pathlib import Path - -import bpy - -from ayon_core.pipeline import ( - get_representation_path, - AVALON_CONTAINER_ID, -) -from ayon_blender.api import plugin -from ayon_blender.api.lib import imprint -from ayon_blender.api.pipeline import ( - AVALON_CONTAINERS, - AVALON_PROPERTY, -) - - -class BlendSceneLoader(plugin.BlenderLoader): - """Load assets from a .blend file.""" - - product_types = {"blendScene"} - representations = {"blend"} - - label = "Append Blend" - icon = "code-fork" - color = "orange" - - @staticmethod - def _get_asset_container(collections): - for coll in collections: - parents = [c for c in collections if c.user_of_id(coll)] - if coll.get(AVALON_PROPERTY) and not parents: - return coll - - return None - - def _process_data(self, libpath, group_name, product_type): - # Append all the data from the .blend file - with bpy.data.libraries.load( - libpath, link=False, relative=False - ) as (data_from, 
data_to): - for attr in dir(data_to): - setattr(data_to, attr, getattr(data_from, attr)) - - members = [] - - # Rename the object to add the asset name - for attr in dir(data_to): - for data in getattr(data_to, attr): - data.name = f"{group_name}:{data.name}" - members.append(data) - - container = self._get_asset_container( - data_to.collections) - assert container, "No asset group found" - - container.name = group_name - - # Link the group to the scene - bpy.context.scene.collection.children.link(container) - - # Remove the library from the blend file - filepath = bpy.path.basename(libpath) - # Blender has a limit of 63 characters for any data name. - # If the filepath is longer, it will be truncated. - if len(filepath) > 63: - filepath = filepath[:63] - library = bpy.data.libraries.get(filepath) - bpy.data.libraries.remove(library) - - return container, members - - def process_asset( - self, context: dict, name: str, namespace: Optional[str] = None, - options: Optional[Dict] = None - ) -> Optional[List]: - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - libpath = self.filepath_from_context(context) - folder_name = context["folder"]["name"] - product_name = context["product"]["name"] - - try: - product_type = context["product"]["productType"] - except ValueError: - product_type = "model" - - asset_name = plugin.prepare_scene_name(folder_name, product_name) - unique_number = plugin.get_unique_number(folder_name, product_name) - group_name = plugin.prepare_scene_name( - folder_name, product_name, unique_number - ) - namespace = namespace or f"{folder_name}_{unique_number}" - - avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) - if not avalon_container: - avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) - bpy.context.scene.collection.children.link(avalon_container) - - container, members = self._process_data( - libpath, group_name, product_type - ) - - avalon_container.children.link(container) - - data = { - "schema": "openpype:container-2.0", - "id": AVALON_CONTAINER_ID, - "name": name, - "namespace": namespace or '', - "loader": str(self.__class__.__name__), - "representation": context["representation"]["id"], - "libpath": libpath, - "asset_name": asset_name, - "parent": context["representation"]["versionId"], - "productType": context["product"]["productType"], - "objectName": group_name, - "members": members, - } - - container[AVALON_PROPERTY] = data - - objects = [ - obj for obj in bpy.data.objects - if obj.name.startswith(f"{group_name}:") - ] - - self[:] = objects - return objects - - def exec_update(self, container: Dict, context: Dict): - """ - Update the loaded asset. - """ - repre_entity = context["representation"] - group_name = container["objectName"] - asset_group = bpy.data.collections.get(group_name) - libpath = Path(get_representation_path(repre_entity)).as_posix() - - assert asset_group, ( - f"The asset is not loaded: {container['objectName']}" - ) - - # Get the parents of the members of the asset group, so we can - # re-link them after the update. - # Also gets the transform for each object to reapply after the update. 
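
[Editor's sketch] The update code below needs to know which collections each member was linked to before the reload. Objects expose this directly; nested collections have to be discovered by querying every collection. A small helper mirroring those lookups (the name is illustrative, not part of the addon):

```python
import bpy

def collection_parents(datablock):
    # Objects know their collections via users_collection; for collections
    # the relationship is queried through Collection.user_of_id().
    if isinstance(datablock, bpy.types.Object):
        return list(datablock.users_collection)
    if isinstance(datablock, bpy.types.Collection):
        return [c for c in bpy.data.collections if c.user_of_id(datablock)]
    return []
```
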
- collection_parents = {} - member_transforms = {} - members = asset_group.get(AVALON_PROPERTY).get("members", []) - loaded_collections = {c for c in bpy.data.collections if c in members} - loaded_collections.add(bpy.data.collections.get(AVALON_CONTAINERS)) - for member in members: - if isinstance(member, bpy.types.Object): - member_parents = set(member.users_collection) - member_transforms[member.name] = member.matrix_basis.copy() - elif isinstance(member, bpy.types.Collection): - member_parents = { - c for c in bpy.data.collections if c.user_of_id(member)} - else: - continue - - member_parents = member_parents.difference(loaded_collections) - if member_parents: - collection_parents[member.name] = list(member_parents) - - old_data = dict(asset_group.get(AVALON_PROPERTY)) - - self.exec_remove(container) - - product_type = container.get("productType") - if product_type is None: - product_type = container["family"] - asset_group, members = self._process_data( - libpath, group_name, product_type - ) - - for member in members: - if member.name in collection_parents: - for parent in collection_parents[member.name]: - if isinstance(member, bpy.types.Object): - parent.objects.link(member) - elif isinstance(member, bpy.types.Collection): - parent.children.link(member) - if member.name in member_transforms and isinstance( - member, bpy.types.Object - ): - member.matrix_basis = member_transforms[member.name] - - avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) - avalon_container.children.link(asset_group) - - # Restore the old data, but reset members, as they don't exist anymore - # This avoids a crash, because the memory addresses of those members - # are not valid anymore - old_data["members"] = [] - asset_group[AVALON_PROPERTY] = old_data - - new_data = { - "libpath": libpath, - "representation": repre_entity["id"], - "parent": repre_entity["versionId"], - "members": members, - } - - imprint(asset_group, new_data) - - def exec_remove(self, container: Dict) -> bool: - """ - Remove an existing container from a Blender scene. - """ - group_name = container["objectName"] - asset_group = bpy.data.collections.get(group_name) - - members = set(asset_group.get(AVALON_PROPERTY).get("members", [])) - - if members: - for attr_name in dir(bpy.data): - attr = getattr(bpy.data, attr_name) - if not isinstance(attr, bpy.types.bpy_prop_collection): - continue - - # ensure to make a list copy because we - # we remove members as we iterate - for data in list(attr): - if data not in members or data == asset_group: - continue - - attr.remove(data) - - bpy.data.collections.remove(asset_group) diff --git a/server_addon/blender/client/ayon_blender/plugins/load/load_cache.py b/server_addon/blender/client/ayon_blender/plugins/load/load_cache.py deleted file mode 100644 index 599610ff39..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/load/load_cache.py +++ /dev/null @@ -1,284 +0,0 @@ -"""Load an asset in Blender from an Alembic file.""" - -from pathlib import Path -from pprint import pformat -from typing import Dict, List, Optional - -import bpy - -from ayon_core.pipeline import ( - get_representation_path, - AVALON_CONTAINER_ID, -) - -from ayon_blender.api.pipeline import ( - AVALON_CONTAINERS, - AVALON_PROPERTY, -) -from ayon_blender.api import plugin, lib - - -class CacheModelLoader(plugin.BlenderLoader): - """Load cache models. - - Stores the imported asset in a collection named after the asset. - - Note: - At least for now it only supports Alembic files. 
- """ - product_types = {"model", "pointcache", "animation", "usd"} - representations = {"abc", "usd"} - - label = "Load Cache" - icon = "code-fork" - color = "orange" - - def _remove(self, asset_group): - objects = list(asset_group.children) - empties = [] - - for obj in objects: - if obj.type == 'MESH': - for material_slot in list(obj.material_slots): - bpy.data.materials.remove(material_slot.material) - bpy.data.meshes.remove(obj.data) - elif obj.type == 'EMPTY': - objects.extend(obj.children) - empties.append(obj) - - for empty in empties: - bpy.data.objects.remove(empty) - - def _process(self, libpath, asset_group, group_name): - plugin.deselect_all() - - relative = bpy.context.preferences.filepaths.use_relative_paths - - if any(libpath.lower().endswith(ext) - for ext in [".usd", ".usda", ".usdc"]): - # USD - bpy.ops.wm.usd_import( - filepath=libpath, - relative_path=relative - ) - - else: - # Alembic - bpy.ops.wm.alembic_import( - filepath=libpath, - relative_path=relative - ) - - imported = lib.get_selection() - - # Use first EMPTY without parent as container - container = next( - (obj for obj in imported - if obj.type == "EMPTY" and not obj.parent), - None - ) - - objects = [] - if container: - nodes = list(container.children) - - for obj in nodes: - obj.parent = asset_group - - bpy.data.objects.remove(container) - - objects.extend(nodes) - for obj in nodes: - objects.extend(obj.children_recursive) - else: - for obj in imported: - obj.parent = asset_group - objects = imported - - for obj in objects: - # Unlink the object from all collections - collections = obj.users_collection - for collection in collections: - collection.objects.unlink(obj) - name = obj.name - obj.name = f"{group_name}:{name}" - if obj.type != 'EMPTY': - name_data = obj.data.name - obj.data.name = f"{group_name}:{name_data}" - - for material_slot in obj.material_slots: - name_mat = material_slot.material.name - material_slot.material.name = f"{group_name}:{name_mat}" - - if not obj.get(AVALON_PROPERTY): - obj[AVALON_PROPERTY] = {} - - avalon_info = obj[AVALON_PROPERTY] - avalon_info.update({"container_name": group_name}) - - plugin.deselect_all() - - return objects - - def _link_objects(self, objects, collection, containers, asset_group): - # Link the imported objects to any collection where the asset group is - # linked to, except the AVALON_CONTAINERS collection - group_collections = [ - collection - for collection in asset_group.users_collection - if collection != containers] - - for obj in objects: - for collection in group_collections: - collection.objects.link(obj) - - def process_asset( - self, context: dict, name: str, namespace: Optional[str] = None, - options: Optional[Dict] = None - ) -> Optional[List]: - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - - libpath = self.filepath_from_context(context) - folder_name = context["folder"]["name"] - product_name = context["product"]["name"] - - asset_name = plugin.prepare_scene_name(folder_name, product_name) - unique_number = plugin.get_unique_number(folder_name, product_name) - group_name = plugin.prepare_scene_name( - folder_name, product_name, unique_number - ) - namespace = namespace or f"{folder_name}_{unique_number}" - - containers = bpy.data.collections.get(AVALON_CONTAINERS) - if not containers: - containers = bpy.data.collections.new(name=AVALON_CONTAINERS) - 
bpy.context.scene.collection.children.link(containers) - - asset_group = bpy.data.objects.new(group_name, object_data=None) - asset_group.empty_display_type = 'SINGLE_ARROW' - containers.objects.link(asset_group) - - objects = self._process(libpath, asset_group, group_name) - - # Link the asset group to the active collection - collection = bpy.context.view_layer.active_layer_collection.collection - collection.objects.link(asset_group) - - self._link_objects(objects, asset_group, containers, asset_group) - - product_type = context["product"]["productType"] - asset_group[AVALON_PROPERTY] = { - "schema": "openpype:container-2.0", - "id": AVALON_CONTAINER_ID, - "name": name, - "namespace": namespace or '', - "loader": str(self.__class__.__name__), - "representation": context["representation"]["id"], - "libpath": libpath, - "asset_name": asset_name, - "parent": context["representation"]["versionId"], - "productType": product_type, - "objectName": group_name - } - - self[:] = objects - return objects - - def exec_update(self, container: Dict, context: Dict): - """Update the loaded asset. - - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. - - Warning: - No nested collections are supported at the moment! - """ - repre_entity = context["representation"] - object_name = container["objectName"] - asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(repre_entity)) - extension = libpath.suffix.lower() - - self.log.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(repre_entity, indent=2), - ) - - assert asset_group, ( - f"The asset is not loaded: {container['objectName']}" - ) - assert libpath, ( - "No existing library file found for {container['objectName']}" - ) - assert libpath.is_file(), ( - f"The file doesn't exist: {libpath}" - ) - assert extension in plugin.VALID_EXTENSIONS, ( - f"Unsupported file: {libpath}" - ) - - metadata = asset_group.get(AVALON_PROPERTY) - group_libpath = metadata["libpath"] - - normalized_group_libpath = ( - str(Path(bpy.path.abspath(group_libpath)).resolve()) - ) - normalized_libpath = ( - str(Path(bpy.path.abspath(str(libpath))).resolve()) - ) - self.log.debug( - "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_group_libpath, - normalized_libpath, - ) - if normalized_group_libpath == normalized_libpath: - self.log.info("Library already loaded, not updating...") - return - - mat = asset_group.matrix_basis.copy() - self._remove(asset_group) - - objects = self._process(str(libpath), asset_group, object_name) - - containers = bpy.data.collections.get(AVALON_CONTAINERS) - self._link_objects(objects, asset_group, containers, asset_group) - - asset_group.matrix_basis = mat - - metadata["libpath"] = str(libpath) - metadata["representation"] = repre_entity["id"] - - def exec_remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. - - Arguments: - container (openpype:container-1.0): Container to remove, - from `host.ls()`. - - Returns: - bool: Whether the container was deleted. - - Warning: - No nested collections are supported at the moment! 
- """ - object_name = container["objectName"] - asset_group = bpy.data.objects.get(object_name) - - if not asset_group: - return False - - self._remove(asset_group) - - bpy.data.objects.remove(asset_group) - - return True diff --git a/server_addon/blender/client/ayon_blender/plugins/load/load_camera_abc.py b/server_addon/blender/client/ayon_blender/plugins/load/load_camera_abc.py deleted file mode 100644 index 7305afd423..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/load/load_camera_abc.py +++ /dev/null @@ -1,238 +0,0 @@ -"""Load an asset in Blender from an Alembic file.""" - -from pathlib import Path -from pprint import pformat -from typing import Dict, List, Optional - -import bpy - -from ayon_core.pipeline import ( - get_representation_path, - AVALON_CONTAINER_ID, -) -from ayon_blender.api import plugin, lib -from ayon_blender.api.pipeline import ( - AVALON_CONTAINERS, - AVALON_PROPERTY, -) - - -class AbcCameraLoader(plugin.BlenderLoader): - """Load a camera from Alembic file. - - Stores the imported asset in an empty named after the asset. - """ - - product_types = {"camera"} - representations = {"abc"} - - label = "Load Camera (ABC)" - icon = "code-fork" - color = "orange" - - def _remove(self, asset_group): - objects = list(asset_group.children) - - for obj in objects: - if obj.type == "CAMERA": - bpy.data.cameras.remove(obj.data) - elif obj.type == "EMPTY": - objects.extend(obj.children) - bpy.data.objects.remove(obj) - - def _process(self, libpath, asset_group, group_name): - plugin.deselect_all() - - # Force the creation of the transform cache even if the camera - # doesn't have an animation. We use the cache to update the camera. - bpy.ops.wm.alembic_import( - filepath=libpath, always_add_cache_reader=True) - - objects = lib.get_selection() - - for obj in objects: - obj.parent = asset_group - - for obj in objects: - name = obj.name - obj.name = f"{group_name}:{name}" - if obj.type != "EMPTY": - name_data = obj.data.name - obj.data.name = f"{group_name}:{name_data}" - - if not obj.get(AVALON_PROPERTY): - obj[AVALON_PROPERTY] = dict() - - avalon_info = obj[AVALON_PROPERTY] - avalon_info.update({"container_name": group_name}) - - plugin.deselect_all() - - return objects - - def process_asset( - self, - context: dict, - name: str, - namespace: Optional[str] = None, - options: Optional[Dict] = None, - ) -> Optional[List]: - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - - libpath = self.filepath_from_context(context) - - folder_name = context["folder"]["name"] - product_name = context["product"]["name"] - - asset_name = plugin.prepare_scene_name(folder_name, product_name) - unique_number = plugin.get_unique_number(folder_name, product_name) - group_name = plugin.prepare_scene_name( - folder_name, product_name, unique_number - ) - namespace = namespace or f"{folder_name}_{unique_number}" - - avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) - if not avalon_container: - avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) - bpy.context.scene.collection.children.link(avalon_container) - - asset_group = bpy.data.objects.new(group_name, object_data=None) - avalon_container.objects.link(asset_group) - - self._process(libpath, asset_group, group_name) - - objects = [] - nodes = list(asset_group.children) - - for obj in nodes: - objects.append(obj) - nodes.extend(list(obj.children)) - - 
bpy.context.scene.collection.objects.link(asset_group) - - asset_group[AVALON_PROPERTY] = { - "schema": "openpype:container-2.0", - "id": AVALON_CONTAINER_ID, - "name": name, - "namespace": namespace or "", - "loader": str(self.__class__.__name__), - "representation": context["representation"]["id"], - "libpath": libpath, - "asset_name": asset_name, - "parent": context["representation"]["versionId"], - "productType": context["product"]["productType"], - "objectName": group_name, - } - - self[:] = objects - return objects - - def exec_update(self, container: Dict, context: Dict): - """Update the loaded asset. - - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. - - Warning: - No nested collections are supported at the moment! - """ - repre_entity = context["representation"] - object_name = container["objectName"] - asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(repre_entity)) - extension = libpath.suffix.lower() - - self.log.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(repre_entity, indent=2), - ) - - assert asset_group, ( - f"The asset is not loaded: {container['objectName']}") - assert libpath, ( - f"No existing library file found for {container['objectName']}") - assert libpath.is_file(), f"The file doesn't exist: {libpath}" - assert extension in plugin.VALID_EXTENSIONS, ( - f"Unsupported file: {libpath}") - - metadata = asset_group.get(AVALON_PROPERTY) - group_libpath = metadata["libpath"] - - normalized_group_libpath = str( - Path(bpy.path.abspath(group_libpath)).resolve()) - normalized_libpath = str( - Path(bpy.path.abspath(str(libpath))).resolve()) - self.log.debug( - "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_group_libpath, - normalized_libpath, - ) - if normalized_group_libpath == normalized_libpath: - self.log.info("Library already loaded, not updating...") - return - - for obj in asset_group.children: - found = False - for constraint in obj.constraints: - if constraint.type == "TRANSFORM_CACHE": - constraint.cache_file.filepath = libpath.as_posix() - found = True - break - if not found: - # This is to keep compatibility with cameras loaded with - # the old loader - # Create a new constraint for the cache file - constraint = obj.constraints.new("TRANSFORM_CACHE") - bpy.ops.cachefile.open(filepath=libpath.as_posix()) - constraint.cache_file = bpy.data.cache_files[-1] - constraint.cache_file.scale = 1.0 - - # This is a workaround to set the object path. Blender doesn't - # load the list of object paths until the object is evaluated. - # This is a hack to force the object to be evaluated. - # The modifier doesn't need to be removed because camera - # objects don't have modifiers. - obj.modifiers.new( - name='MeshSequenceCache', type='MESH_SEQUENCE_CACHE') - bpy.context.evaluated_depsgraph_get() - - constraint.object_path = ( - constraint.cache_file.object_paths[0].path) - - metadata["libpath"] = str(libpath) - metadata["representation"] = repre_entity["id"] - - def exec_remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. - - Arguments: - container (openpype:container-1.0): Container to remove, - from `host.ls()`. - - Returns: - bool: Whether the container was deleted. 
- - Warning: - No nested collections are supported at the moment! - """ - object_name = container["objectName"] - asset_group = bpy.data.objects.get(object_name) - - if not asset_group: - return False - - self._remove(asset_group) - - bpy.data.objects.remove(asset_group) - - return True diff --git a/server_addon/blender/client/ayon_blender/plugins/load/load_camera_fbx.py b/server_addon/blender/client/ayon_blender/plugins/load/load_camera_fbx.py deleted file mode 100644 index d2900c6c3f..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/load/load_camera_fbx.py +++ /dev/null @@ -1,224 +0,0 @@ -"""Load an asset in Blender from an Alembic file.""" - -from pathlib import Path -from pprint import pformat -from typing import Dict, List, Optional - -import bpy - -from ayon_core.pipeline import ( - get_representation_path, - AVALON_CONTAINER_ID, -) -from ayon_blender.api import plugin, lib -from ayon_blender.api.pipeline import ( - AVALON_CONTAINERS, - AVALON_PROPERTY, -) - - -class FbxCameraLoader(plugin.BlenderLoader): - """Load a camera from FBX. - - Stores the imported asset in an empty named after the asset. - """ - - product_types = {"camera"} - representations = {"fbx"} - - label = "Load Camera (FBX)" - icon = "code-fork" - color = "orange" - - def _remove(self, asset_group): - objects = list(asset_group.children) - - for obj in objects: - if obj.type == 'CAMERA': - bpy.data.cameras.remove(obj.data) - elif obj.type == 'EMPTY': - objects.extend(obj.children) - bpy.data.objects.remove(obj) - - def _process(self, libpath, asset_group, group_name): - plugin.deselect_all() - - collection = bpy.context.view_layer.active_layer_collection.collection - - bpy.ops.import_scene.fbx(filepath=libpath) - - parent = bpy.context.scene.collection - - objects = lib.get_selection() - - for obj in objects: - obj.parent = asset_group - - for obj in objects: - parent.objects.link(obj) - collection.objects.unlink(obj) - - for obj in objects: - name = obj.name - obj.name = f"{group_name}:{name}" - if obj.type != 'EMPTY': - name_data = obj.data.name - obj.data.name = f"{group_name}:{name_data}" - - if not obj.get(AVALON_PROPERTY): - obj[AVALON_PROPERTY] = dict() - - avalon_info = obj[AVALON_PROPERTY] - avalon_info.update({"container_name": group_name}) - - plugin.deselect_all() - - return objects - - def process_asset( - self, context: dict, name: str, namespace: Optional[str] = None, - options: Optional[Dict] = None - ) -> Optional[List]: - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - libpath = self.filepath_from_context(context) - folder_name = context["folder"]["name"] - product_name = context["product"]["name"] - - asset_name = plugin.prepare_scene_name(folder_name, product_name) - unique_number = plugin.get_unique_number(folder_name, product_name) - group_name = plugin.prepare_scene_name( - folder_name, product_name, unique_number - ) - namespace = namespace or f"{folder_name}_{unique_number}" - - avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) - if not avalon_container: - avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) - bpy.context.scene.collection.children.link(avalon_container) - - asset_group = bpy.data.objects.new(group_name, object_data=None) - avalon_container.objects.link(asset_group) - - self._process(libpath, asset_group, group_name) - - objects = [] - nodes = list(asset_group.children) - - for obj in nodes: - 
objects.append(obj) - nodes.extend(list(obj.children)) - - bpy.context.scene.collection.objects.link(asset_group) - - asset_group[AVALON_PROPERTY] = { - "schema": "openpype:container-2.0", - "id": AVALON_CONTAINER_ID, - "name": name, - "namespace": namespace or '', - "loader": str(self.__class__.__name__), - "representation": context["representation"]["id"], - "libpath": libpath, - "asset_name": asset_name, - "parent": context["representation"]["versionId"], - "productType": context["product"]["productType"], - "objectName": group_name - } - - self[:] = objects - return objects - - def exec_update(self, container: Dict, context: Dict): - """Update the loaded asset. - - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. - - Warning: - No nested collections are supported at the moment! - """ - repre_entity = context["representation"] - object_name = container["objectName"] - asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(repre_entity)) - extension = libpath.suffix.lower() - - self.log.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(repre_entity, indent=2), - ) - - assert asset_group, ( - f"The asset is not loaded: {container['objectName']}" - ) - assert libpath, ( - "No existing library file found for {container['objectName']}" - ) - assert libpath.is_file(), ( - f"The file doesn't exist: {libpath}" - ) - assert extension in plugin.VALID_EXTENSIONS, ( - f"Unsupported file: {libpath}" - ) - - metadata = asset_group.get(AVALON_PROPERTY) - group_libpath = metadata["libpath"] - - normalized_group_libpath = ( - str(Path(bpy.path.abspath(group_libpath)).resolve()) - ) - normalized_libpath = ( - str(Path(bpy.path.abspath(str(libpath))).resolve()) - ) - self.log.debug( - "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_group_libpath, - normalized_libpath, - ) - if normalized_group_libpath == normalized_libpath: - self.log.info("Library already loaded, not updating...") - return - - mat = asset_group.matrix_basis.copy() - - self._remove(asset_group) - self._process(str(libpath), asset_group, object_name) - - asset_group.matrix_basis = mat - - metadata["libpath"] = str(libpath) - metadata["representation"] = repre_entity["id"] - - def exec_remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. - - Arguments: - container (openpype:container-1.0): Container to remove, - from `host.ls()`. - - Returns: - bool: Whether the container was deleted. - - Warning: - No nested collections are supported at the moment! 
- """ - object_name = container["objectName"] - asset_group = bpy.data.objects.get(object_name) - - if not asset_group: - return False - - self._remove(asset_group) - - bpy.data.objects.remove(asset_group) - - return True diff --git a/server_addon/blender/client/ayon_blender/plugins/load/load_fbx.py b/server_addon/blender/client/ayon_blender/plugins/load/load_fbx.py deleted file mode 100644 index fe3d747dab..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/load/load_fbx.py +++ /dev/null @@ -1,279 +0,0 @@ -"""Load an asset in Blender from an Alembic file.""" - -from pathlib import Path -from pprint import pformat -from typing import Dict, List, Optional - -import bpy - -from ayon_core.pipeline import ( - get_representation_path, - AVALON_CONTAINER_ID, -) -from ayon_blender.api import plugin, lib -from ayon_blender.api.pipeline import ( - AVALON_CONTAINERS, - AVALON_PROPERTY, -) - - -class FbxModelLoader(plugin.BlenderLoader): - """Load FBX models. - - Stores the imported asset in an empty named after the asset. - """ - - product_types = {"model", "rig"} - representations = {"fbx"} - - label = "Load FBX" - icon = "code-fork" - color = "orange" - - def _remove(self, asset_group): - objects = list(asset_group.children) - - for obj in objects: - if obj.type == 'MESH': - for material_slot in list(obj.material_slots): - if material_slot.material: - bpy.data.materials.remove(material_slot.material) - bpy.data.meshes.remove(obj.data) - elif obj.type == 'ARMATURE': - objects.extend(obj.children) - bpy.data.armatures.remove(obj.data) - elif obj.type == 'CURVE': - bpy.data.curves.remove(obj.data) - elif obj.type == 'EMPTY': - objects.extend(obj.children) - bpy.data.objects.remove(obj) - - def _process(self, libpath, asset_group, group_name, action): - plugin.deselect_all() - - collection = bpy.context.view_layer.active_layer_collection.collection - - bpy.ops.import_scene.fbx(filepath=libpath) - - parent = bpy.context.scene.collection - - imported = lib.get_selection() - - empties = [obj for obj in imported if obj.type == 'EMPTY'] - - container = None - - for empty in empties: - if not empty.parent: - container = empty - break - - assert container, "No asset group found" - - # Children must be linked before parents, - # otherwise the hierarchy will break - objects = [] - nodes = list(container.children) - - for obj in nodes: - obj.parent = asset_group - - bpy.data.objects.remove(container) - - for obj in nodes: - objects.append(obj) - nodes.extend(list(obj.children)) - - objects.reverse() - - for obj in objects: - parent.objects.link(obj) - collection.objects.unlink(obj) - - for obj in objects: - name = obj.name - obj.name = f"{group_name}:{name}" - if obj.type != 'EMPTY': - name_data = obj.data.name - obj.data.name = f"{group_name}:{name_data}" - - if obj.type == 'MESH': - for material_slot in obj.material_slots: - name_mat = material_slot.material.name - material_slot.material.name = f"{group_name}:{name_mat}" - elif obj.type == 'ARMATURE': - anim_data = obj.animation_data - if action is not None: - anim_data.action = action - elif anim_data.action is not None: - name_action = anim_data.action.name - anim_data.action.name = f"{group_name}:{name_action}" - - if not obj.get(AVALON_PROPERTY): - obj[AVALON_PROPERTY] = dict() - - avalon_info = obj[AVALON_PROPERTY] - avalon_info.update({"container_name": group_name}) - - plugin.deselect_all() - - return objects - - def process_asset( - self, context: dict, name: str, namespace: Optional[str] = None, - options: Optional[Dict] = None - ) -> 
Optional[List]: - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - libpath = self.filepath_from_context(context) - folder_name = context["folder"]["name"] - product_name = context["product"]["name"] - - asset_name = plugin.prepare_scene_name(folder_name, product_name) - unique_number = plugin.get_unique_number(folder_name, product_name) - group_name = plugin.prepare_scene_name( - folder_name, product_name, unique_number - ) - namespace = namespace or f"{folder_name}_{unique_number}" - - avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) - if not avalon_container: - avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) - bpy.context.scene.collection.children.link(avalon_container) - - asset_group = bpy.data.objects.new(group_name, object_data=None) - avalon_container.objects.link(asset_group) - - objects = self._process(libpath, asset_group, group_name, None) - - objects = [] - nodes = list(asset_group.children) - - for obj in nodes: - objects.append(obj) - nodes.extend(list(obj.children)) - - bpy.context.scene.collection.objects.link(asset_group) - - asset_group[AVALON_PROPERTY] = { - "schema": "openpype:container-2.0", - "id": AVALON_CONTAINER_ID, - "name": name, - "namespace": namespace or '', - "loader": str(self.__class__.__name__), - "representation": context["representation"]["id"], - "libpath": libpath, - "asset_name": asset_name, - "parent": context["representation"]["versionId"], - "productType": context["product"]["productType"], - "objectName": group_name - } - - self[:] = objects - return objects - - def exec_update(self, container: Dict, context: Dict): - """Update the loaded asset. - - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. - - Warning: - No nested collections are supported at the moment! 
- """ - repre_entity = context["representation"] - object_name = container["objectName"] - asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(repre_entity)) - extension = libpath.suffix.lower() - - self.log.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(repre_entity, indent=2), - ) - - assert asset_group, ( - f"The asset is not loaded: {container['objectName']}" - ) - assert libpath, ( - "No existing library file found for {container['objectName']}" - ) - assert libpath.is_file(), ( - f"The file doesn't exist: {libpath}" - ) - assert extension in plugin.VALID_EXTENSIONS, ( - f"Unsupported file: {libpath}" - ) - - metadata = asset_group.get(AVALON_PROPERTY) - group_libpath = metadata["libpath"] - - normalized_group_libpath = ( - str(Path(bpy.path.abspath(group_libpath)).resolve()) - ) - normalized_libpath = ( - str(Path(bpy.path.abspath(str(libpath))).resolve()) - ) - self.log.debug( - "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_group_libpath, - normalized_libpath, - ) - if normalized_group_libpath == normalized_libpath: - self.log.info("Library already loaded, not updating...") - return - - # Get the armature of the rig - objects = asset_group.children - armatures = [obj for obj in objects if obj.type == 'ARMATURE'] - action = None - - if armatures: - armature = armatures[0] - - if armature.animation_data and armature.animation_data.action: - action = armature.animation_data.action - - mat = asset_group.matrix_basis.copy() - self._remove(asset_group) - - self._process(str(libpath), asset_group, object_name, action) - - asset_group.matrix_basis = mat - - metadata["libpath"] = str(libpath) - metadata["representation"] = repre_entity["id"] - - def exec_remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. - - Arguments: - container (openpype:container-1.0): Container to remove, - from `host.ls()`. - - Returns: - bool: Whether the container was deleted. - - Warning: - No nested collections are supported at the moment! 
- """ - object_name = container["objectName"] - asset_group = bpy.data.objects.get(object_name) - - if not asset_group: - return False - - self._remove(asset_group) - - bpy.data.objects.remove(asset_group) - - return True diff --git a/server_addon/blender/client/ayon_blender/plugins/load/load_layout_json.py b/server_addon/blender/client/ayon_blender/plugins/load/load_layout_json.py deleted file mode 100644 index 9a2d17b4fc..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/load/load_layout_json.py +++ /dev/null @@ -1,297 +0,0 @@ -"""Load a layout in Blender.""" - -import json -from pathlib import Path -from pprint import pformat -from typing import Dict, Optional - -import bpy - -from ayon_core.pipeline import ( - discover_loader_plugins, - remove_container, - load_container, - get_representation_path, - loaders_from_representation, - AVALON_CONTAINER_ID, -) -from ayon_blender.api.pipeline import ( - AVALON_INSTANCES, - AVALON_CONTAINERS, - AVALON_PROPERTY, -) -from ayon_blender.api import plugin - - -class JsonLayoutLoader(plugin.BlenderLoader): - """Load layout published from Unreal.""" - - product_types = {"layout"} - representations = {"json"} - - label = "Load Layout" - icon = "code-fork" - color = "orange" - - animation_creator_name = "CreateAnimation" - - def _remove(self, asset_group): - objects = list(asset_group.children) - - for obj in objects: - remove_container(obj.get(AVALON_PROPERTY)) - - def _remove_animation_instances(self, asset_group): - instances = bpy.data.collections.get(AVALON_INSTANCES) - if instances: - for obj in list(asset_group.children): - anim_collection = instances.children.get( - obj.name + "_animation") - if anim_collection: - bpy.data.collections.remove(anim_collection) - - def _get_loader(self, loaders, product_type): - name = "" - if product_type == 'rig': - name = "BlendRigLoader" - elif product_type == 'model': - name = "BlendModelLoader" - - if name == "": - return None - - for loader in loaders: - if loader.__name__ == name: - return loader - - return None - - def _process(self, libpath, asset, asset_group, actions): - plugin.deselect_all() - - with open(libpath, "r") as fp: - data = json.load(fp) - - all_loaders = discover_loader_plugins() - - for element in data: - reference = element.get('reference') - product_type = element.get("product_type") - if product_type is None: - product_type = element.get("family") - - loaders = loaders_from_representation(all_loaders, reference) - loader = self._get_loader(loaders, product_type) - - if not loader: - continue - - instance_name = element.get('instance_name') - - action = None - - if actions: - action = actions.get(instance_name, None) - - options = { - 'parent': asset_group, - 'transform': element.get('transform'), - 'action': action, - 'create_animation': True if product_type == 'rig' else False, - 'animation_asset': asset - } - - if element.get('animation'): - options['animation_file'] = str(Path(libpath).with_suffix( - '')) + "." + element.get('animation') - - # This should return the loaded asset, but the load call will be - # added to the queue to run in the Blender main thread, so - # at this time it will not return anything. The assets will be - # loaded in the next Blender cycle, so we use the options to - # set the transform, parent and assign the action, if there is one. 
- load_container( - loader, - reference, - namespace=instance_name, - options=options - ) - - # Camera creation when loading a layout is not necessary for now, - # but the code is worth keeping in case we need it in the future. - # # Create the camera asset and the camera instance - # creator_plugin = get_legacy_creator_by_name("CreateCamera") - # if not creator_plugin: - # raise ValueError("Creator plugin \"CreateCamera\" was " - # "not found.") - - # TODO: Refactor legacy create usage to new style creators - # legacy_create( - # creator_plugin, - # name="camera", - # # name=f"{unique_number}_{product[name]}_animation", - # asset=asset, - # options={"useSelection": False} - # # data={"dependencies": context["representation"]["id"]} - # ) - - def process_asset(self, - context: dict, - name: str, - namespace: Optional[str] = None, - options: Optional[Dict] = None): - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - libpath = self.filepath_from_context(context) - folder_name = context["folder"]["name"] - product_name = context["product"]["name"] - - asset_name = plugin.prepare_scene_name(folder_name, product_name) - unique_number = plugin.get_unique_number(folder_name, product_name) - group_name = plugin.prepare_scene_name( - folder_name, product_name, unique_number - ) - namespace = namespace or f"{folder_name}_{unique_number}" - - avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) - if not avalon_container: - avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) - bpy.context.scene.collection.children.link(avalon_container) - - asset_group = bpy.data.objects.new(group_name, object_data=None) - asset_group.empty_display_type = 'SINGLE_ARROW' - avalon_container.objects.link(asset_group) - - self._process(libpath, asset_name, asset_group, None) - - bpy.context.scene.collection.objects.link(asset_group) - - asset_group[AVALON_PROPERTY] = { - "schema": "openpype:container-2.0", - "id": AVALON_CONTAINER_ID, - "name": name, - "namespace": namespace or '', - "loader": str(self.__class__.__name__), - "representation": context["representation"]["id"], - "libpath": libpath, - "asset_name": asset_name, - "parent": context["representation"]["versionId"], - "productType": context["product"]["productType"], - "objectName": group_name - } - - self[:] = asset_group.children - return asset_group.children - - def exec_update(self, container: Dict, context: Dict): - """Update the loaded asset. - - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. 
- """ - repre_entity = context["representation"] - object_name = container["objectName"] - asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(repre_entity)) - extension = libpath.suffix.lower() - - self.log.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(repre_entity, indent=2), - ) - - assert asset_group, ( - f"The asset is not loaded: {container['objectName']}" - ) - assert libpath, ( - "No existing library file found for {container['objectName']}" - ) - assert libpath.is_file(), ( - f"The file doesn't exist: {libpath}" - ) - assert extension in plugin.VALID_EXTENSIONS, ( - f"Unsupported file: {libpath}" - ) - - metadata = asset_group.get(AVALON_PROPERTY) - group_libpath = metadata["libpath"] - - normalized_group_libpath = ( - str(Path(bpy.path.abspath(group_libpath)).resolve()) - ) - normalized_libpath = ( - str(Path(bpy.path.abspath(str(libpath))).resolve()) - ) - self.log.debug( - "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_group_libpath, - normalized_libpath, - ) - if normalized_group_libpath == normalized_libpath: - self.log.info("Library already loaded, not updating...") - return - - actions = {} - - for obj in asset_group.children: - obj_meta = obj.get(AVALON_PROPERTY) - product_type = obj_meta.get("productType") - if product_type is None: - product_type = obj_meta.get("family") - if product_type == "rig": - rig = None - for child in obj.children: - if child.type == 'ARMATURE': - rig = child - break - if not rig: - raise Exception("No armature in the rig asset group.") - if rig.animation_data and rig.animation_data.action: - namespace = obj_meta.get('namespace') - actions[namespace] = rig.animation_data.action - - mat = asset_group.matrix_basis.copy() - - self._remove_animation_instances(asset_group) - - self._remove(asset_group) - - self._process(str(libpath), asset_group, actions) - - asset_group.matrix_basis = mat - - metadata["libpath"] = str(libpath) - metadata["representation"] = repre_entity["id"] - - def exec_remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. - - Arguments: - container (openpype:container-1.0): Container to remove, - from `host.ls()`. - - Returns: - bool: Whether the container was deleted. - """ - object_name = container["objectName"] - asset_group = bpy.data.objects.get(object_name) - - if not asset_group: - return False - - self._remove_animation_instances(asset_group) - - self._remove(asset_group) - - bpy.data.objects.remove(asset_group) - - return True diff --git a/server_addon/blender/client/ayon_blender/plugins/load/load_look.py b/server_addon/blender/client/ayon_blender/plugins/load/load_look.py deleted file mode 100644 index d214917d3e..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/load/load_look.py +++ /dev/null @@ -1,223 +0,0 @@ -"""Load a model asset in Blender.""" - -from pathlib import Path -from pprint import pformat -from typing import Dict, List, Optional - -import os -import json -import bpy - -from ayon_core.pipeline import get_representation_path -from ayon_blender.api import plugin -from ayon_blender.api.pipeline import ( - containerise_existing, - AVALON_PROPERTY -) - - -class BlendLookLoader(plugin.BlenderLoader): - """Load models from a .blend file. - - Because they come from a .blend file we can simply link the collection that - contains the model. There is no further need to 'containerise' it. 
- """ - - product_types = {"look"} - representations = {"json"} - - label = "Load Look" - icon = "code-fork" - color = "orange" - - def get_all_children(self, obj): - children = list(obj.children) - - for child in children: - children.extend(child.children) - - return children - - def _process(self, libpath, container_name, objects): - with open(libpath, "r") as fp: - data = json.load(fp) - - path = os.path.dirname(libpath) - materials_path = f"{path}/resources" - - materials = [] - - for entry in data: - file = entry.get('fbx_filename') - if file is None: - continue - - bpy.ops.import_scene.fbx(filepath=f"{materials_path}/{file}") - - mesh = [o for o in bpy.context.scene.objects if o.select_get()][0] - material = mesh.data.materials[0] - material.name = f"{material.name}:{container_name}" - - texture_file = entry.get('tga_filename') - if texture_file: - node_tree = material.node_tree - pbsdf = node_tree.nodes['Principled BSDF'] - base_color = pbsdf.inputs[0] - tex_node = base_color.links[0].from_node - tex_node.image.filepath = f"{materials_path}/{texture_file}" - - materials.append(material) - - for obj in objects: - for child in self.get_all_children(obj): - mesh_name = child.name.split(':')[0] - if mesh_name == material.name.split(':')[0]: - child.data.materials.clear() - child.data.materials.append(material) - break - - bpy.data.objects.remove(mesh) - - return materials, objects - - def process_asset( - self, context: dict, name: str, namespace: Optional[str] = None, - options: Optional[Dict] = None - ) -> Optional[List]: - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - - libpath = self.filepath_from_context(context) - folder_name = context["folder"]["name"] - product_name = context["product"]["name"] - - lib_container = plugin.prepare_scene_name( - folder_name, product_name - ) - unique_number = plugin.get_unique_number( - folder_name, product_name - ) - namespace = namespace or f"{folder_name}_{unique_number}" - container_name = plugin.prepare_scene_name( - folder_name, product_name, unique_number - ) - - container = bpy.data.collections.new(lib_container) - container.name = container_name - containerise_existing( - container, - name, - namespace, - context, - self.__class__.__name__, - ) - - metadata = container.get(AVALON_PROPERTY) - - metadata["libpath"] = libpath - metadata["lib_container"] = lib_container - - selected = [o for o in bpy.context.scene.objects if o.select_get()] - - materials, objects = self._process(libpath, container_name, selected) - - # Save the list of imported materials in the metadata container - metadata["objects"] = objects - metadata["materials"] = materials - - metadata["parent"] = context["representation"]["versionId"] - metadata["product_type"] = context["product"]["productType"] - - nodes = list(container.objects) - nodes.append(container) - self[:] = nodes - return nodes - - def update(self, container: Dict, context: Dict): - collection = bpy.data.collections.get(container["objectName"]) - repre_entity = context["representation"] - libpath = Path(get_representation_path(repre_entity)) - extension = libpath.suffix.lower() - - self.log.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(repre_entity, indent=2), - ) - - assert collection, ( - f"The asset is not loaded: {container['objectName']}" - ) - assert not (collection.children), ( - "Nested collections are not supported." 
- ) - assert libpath, ( - "No existing library file found for {container['objectName']}" - ) - assert libpath.is_file(), ( - f"The file doesn't exist: {libpath}" - ) - assert extension in plugin.VALID_EXTENSIONS, ( - f"Unsupported file: {libpath}" - ) - - collection_metadata = collection.get(AVALON_PROPERTY) - collection_libpath = collection_metadata["libpath"] - - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) - ) - normalized_libpath = ( - str(Path(bpy.path.abspath(str(libpath))).resolve()) - ) - self.log.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, - normalized_libpath, - ) - if normalized_collection_libpath == normalized_libpath: - self.log.info("Library already loaded, not updating...") - return - - for obj in collection_metadata['objects']: - for child in self.get_all_children(obj): - child.data.materials.clear() - - for material in collection_metadata['materials']: - bpy.data.materials.remove(material) - - namespace = collection_metadata['namespace'] - name = collection_metadata['name'] - - container_name = f"{namespace}_{name}" - - materials, objects = self._process( - libpath, container_name, collection_metadata['objects']) - - collection_metadata["objects"] = objects - collection_metadata["materials"] = materials - collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = repre_entity["id"] - - def remove(self, container: Dict) -> bool: - collection = bpy.data.collections.get(container["objectName"]) - if not collection: - return False - - collection_metadata = collection.get(AVALON_PROPERTY) - - for obj in collection_metadata['objects']: - for child in self.get_all_children(obj): - child.data.materials.clear() - - for material in collection_metadata['materials']: - bpy.data.materials.remove(material) - - bpy.data.collections.remove(collection) - - return True diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/collect_current_file.py b/server_addon/blender/client/ayon_blender/plugins/publish/collect_current_file.py deleted file mode 100644 index 6568372169..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/collect_current_file.py +++ /dev/null @@ -1,15 +0,0 @@ -import pyblish.api -from ayon_blender.api import workio, plugin - - -class CollectBlenderCurrentFile(plugin.BlenderContextPlugin): - """Inject the current working file into context""" - - order = pyblish.api.CollectorOrder - 0.5 - label = "Blender Current File" - hosts = ["blender"] - - def process(self, context): - """Inject the current working file""" - current_file = workio.current_file() - context.data["currentFile"] = current_file diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/collect_file_dependencies.py b/server_addon/blender/client/ayon_blender/plugins/publish/collect_file_dependencies.py deleted file mode 100644 index ea36ab459c..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/collect_file_dependencies.py +++ /dev/null @@ -1,36 +0,0 @@ -from pathlib import Path - -import pyblish.api - -import bpy - - -class CollectFileDependencies(pyblish.api.ContextPlugin): - """Gather all files referenced in this scene.""" - - label = "Collect File Dependencies" - order = pyblish.api.CollectorOrder - 0.49 - hosts = ["blender"] - families = ["render"] - - @classmethod - def apply_settings(cls, project_settings): - # Disable plug-in if not used for deadline submission anyway - settings = 
project_settings["deadline"]["publish"]["BlenderSubmitDeadline"] # noqa - cls.enabled = settings.get("asset_dependencies", True) - - def process(self, context): - dependencies = set() - - # Add alembic files as dependencies - for cache in bpy.data.cache_files: - dependencies.add( - Path(bpy.path.abspath(cache.filepath)).resolve().as_posix()) - - # Add image files as dependencies - for image in bpy.data.images: - if image.filepath: - dependencies.add(Path( - bpy.path.abspath(image.filepath)).resolve().as_posix()) - - context.data["fileDependencies"] = list(dependencies) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/collect_instance.py b/server_addon/blender/client/ayon_blender/plugins/publish/collect_instance.py deleted file mode 100644 index 7d6f841ba3..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/collect_instance.py +++ /dev/null @@ -1,44 +0,0 @@ -import bpy - -import pyblish.api - -from ayon_core.pipeline.publish import KnownPublishError -from ayon_blender.api import plugin -from ayon_blender.api.pipeline import AVALON_PROPERTY - - -class CollectBlenderInstanceData(plugin.BlenderInstancePlugin): - """Validator to verify that the instance is not empty""" - - order = pyblish.api.CollectorOrder - hosts = ["blender"] - families = ["model", "pointcache", "animation", "rig", "camera", "layout", - "blendScene", "usd"] - label = "Collect Instance" - - def process(self, instance): - instance_node = instance.data["transientData"]["instance_node"] - - # Collect members of the instance - members = [instance_node] - if isinstance(instance_node, bpy.types.Collection): - members.extend(instance_node.objects) - members.extend(instance_node.children) - - # Special case for animation instances, include armatures - if instance.data["productType"] == "animation": - for obj in instance_node.objects: - if obj.type == 'EMPTY' and obj.get(AVALON_PROPERTY): - members.extend( - child for child in obj.children - if child.type == 'ARMATURE' - ) - elif isinstance(instance_node, bpy.types.Object): - members.extend(instance_node.children_recursive) - else: - raise KnownPublishError( - f"Unsupported instance node type '{type(instance_node)}' " - f"for instance '{instance}'" - ) - - instance[:] = members diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/collect_render.py b/server_addon/blender/client/ayon_blender/plugins/publish/collect_render.py deleted file mode 100644 index ac5dc5bf6f..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/collect_render.py +++ /dev/null @@ -1,120 +0,0 @@ -# -*- coding: utf-8 -*- -"""Collect render data.""" - -import os -import re - -import bpy -import pyblish.api - -from ayon_blender.api import colorspace, plugin - - -class CollectBlenderRender(plugin.BlenderInstancePlugin): - """Gather all publishable render instances.""" - - order = pyblish.api.CollectorOrder + 0.01 - hosts = ["blender"] - families = ["render"] - label = "Collect Render" - sync_workfile_version = False - - @staticmethod - def generate_expected_beauty( - render_product, frame_start, frame_end, frame_step, ext - ): - """ - Generate the expected files for the render product for the beauty - render. This returns a list of files that should be rendered. It - replaces the sequence of `#` with the frame number. 
- """ - path = os.path.dirname(render_product) - file = os.path.basename(render_product) - - expected_files = [] - - for frame in range(frame_start, frame_end + 1, frame_step): - frame_str = str(frame).rjust(4, "0") - filename = re.sub("#+", frame_str, file) - expected_file = f"{os.path.join(path, filename)}.{ext}" - expected_files.append(expected_file.replace("\\", "/")) - - return { - "beauty": expected_files - } - - @staticmethod - def generate_expected_aovs( - aov_file_product, frame_start, frame_end, frame_step, ext - ): - """ - Generate the expected files for the render product for the beauty - render. This returns a list of files that should be rendered. It - replaces the sequence of `#` with the frame number. - """ - expected_files = {} - - for aov_name, aov_file in aov_file_product: - path = os.path.dirname(aov_file) - file = os.path.basename(aov_file) - - aov_files = [] - - for frame in range(frame_start, frame_end + 1, frame_step): - frame_str = str(frame).rjust(4, "0") - filename = re.sub("#+", frame_str, file) - expected_file = f"{os.path.join(path, filename)}.{ext}" - aov_files.append(expected_file.replace("\\", "/")) - - expected_files[aov_name] = aov_files - - return expected_files - - def process(self, instance): - context = instance.context - - instance_node = instance.data["transientData"]["instance_node"] - render_data = instance_node.get("render_data") - - assert render_data, "No render data found." - - render_product = render_data.get("render_product") - aov_file_product = render_data.get("aov_file_product") - ext = render_data.get("image_format") - multilayer = render_data.get("multilayer_exr") - - frame_start = context.data["frameStart"] - frame_end = context.data["frameEnd"] - frame_handle_start = context.data["frameStartHandle"] - frame_handle_end = context.data["frameEndHandle"] - - expected_beauty = self.generate_expected_beauty( - render_product, int(frame_start), int(frame_end), - int(bpy.context.scene.frame_step), ext) - - expected_aovs = self.generate_expected_aovs( - aov_file_product, int(frame_start), int(frame_end), - int(bpy.context.scene.frame_step), ext) - - expected_files = expected_beauty | expected_aovs - - instance.data.update({ - "families": ["render", "render.farm"], - "frameStart": frame_start, - "frameEnd": frame_end, - "frameStartHandle": frame_handle_start, - "frameEndHandle": frame_handle_end, - "fps": context.data["fps"], - "byFrameStep": bpy.context.scene.frame_step, - "review": render_data.get("review", False), - "multipartExr": ext == "exr" and multilayer, - "farm": True, - "expectedFiles": [expected_files], - # OCIO not currently implemented in Blender, but the following - # settings are required by the schema, so it is hardcoded. 
- # TODO: Implement OCIO in Blender - "colorspaceConfig": "", - "colorspaceDisplay": "sRGB", - "colorspaceView": "ACES 1.0 SDR-video", - "renderProducts": colorspace.ARenderProduct(), - }) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/collect_review.py b/server_addon/blender/client/ayon_blender/plugins/publish/collect_review.py deleted file mode 100644 index c013910b5a..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/collect_review.py +++ /dev/null @@ -1,68 +0,0 @@ -import bpy -import pyblish.api -from ayon_blender.api import plugin - - -class CollectReview(plugin.BlenderInstancePlugin): - """Collect Review data - - """ - - order = pyblish.api.CollectorOrder + 0.3 - label = "Collect Review Data" - families = ["review"] - - def process(self, instance): - - self.log.debug(f"instance: {instance}") - - datablock = instance.data["transientData"]["instance_node"] - - # get cameras - cameras = [ - obj - for obj in datablock.all_objects - if isinstance(obj, bpy.types.Object) and obj.type == "CAMERA" - ] - - assert len(cameras) == 1, ( - f"Not a single camera found in extraction: {cameras}" - ) - camera = cameras[0].name - self.log.debug(f"camera: {camera}") - - focal_length = cameras[0].data.lens - - # get isolate objects list from meshes instance members. - types = {"MESH", "GPENCIL"} - isolate_objects = [ - obj - for obj in instance - if isinstance(obj, bpy.types.Object) and obj.type in types - ] - - if not instance.data.get("remove"): - # Store focal length in `burninDataMembers` - burninData = instance.data.setdefault("burninDataMembers", {}) - burninData["focalLength"] = focal_length - - instance.data.update({ - "review_camera": camera, - "frameStart": instance.context.data["frameStart"], - "frameEnd": instance.context.data["frameEnd"], - "fps": instance.context.data["fps"], - "isolate": isolate_objects, - }) - - self.log.debug(f"instance data: {instance.data}") - - # TODO : Collect audio - audio_tracks = [] - instance.data["audio"] = [] - for track in audio_tracks: - instance.data["audio"].append( - { - "offset": track.offset.get(), - "filename": track.filename.get(), - } - ) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/collect_workfile.py b/server_addon/blender/client/ayon_blender/plugins/publish/collect_workfile.py deleted file mode 100644 index 347a5caf01..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/collect_workfile.py +++ /dev/null @@ -1,38 +0,0 @@ -from pathlib import Path - -from pyblish.api import CollectorOrder -from ayon_blender.api import plugin - - -class CollectWorkfile(plugin.BlenderInstancePlugin): - """Inject workfile data into its instance.""" - - order = CollectorOrder - label = "Collect Workfile" - hosts = ["blender"] - families = ["workfile"] - - def process(self, instance): - """Process collector.""" - - context = instance.context - filepath = Path(context.data["currentFile"]) - ext = filepath.suffix - - instance.data.update( - { - "setMembers": [filepath.as_posix()], - "frameStart": context.data.get("frameStart", 1), - "frameEnd": context.data.get("frameEnd", 1), - "handleStart": context.data.get("handleStart", 1), - "handledEnd": context.data.get("handleEnd", 1), - "representations": [ - { - "name": ext.lstrip("."), - "ext": ext.lstrip("."), - "files": filepath.name, - "stagingDir": filepath.parent, - } - ], - } - ) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/extract_abc.py 
b/server_addon/blender/client/ayon_blender/plugins/publish/extract_abc.py deleted file mode 100644 index 5da0258586..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/extract_abc.py +++ /dev/null @@ -1,94 +0,0 @@ -import os - -import bpy - -from ayon_core.lib import BoolDef -from ayon_core.pipeline import publish -from ayon_blender.api import plugin - - -class ExtractABC(plugin.BlenderExtractor, publish.OptionalPyblishPluginMixin): - """Extract as ABC.""" - - label = "Extract ABC" - hosts = ["blender"] - families = ["pointcache"] - - def process(self, instance): - if not self.is_active(instance.data): - return - - attr_values = self.get_attr_values_from_data(instance.data) - - # Define extract output file path - stagingdir = self.staging_dir(instance) - folder_name = instance.data["folderEntity"]["name"] - product_name = instance.data["productName"] - instance_name = f"{folder_name}_{product_name}" - filename = f"{instance_name}.abc" - filepath = os.path.join(stagingdir, filename) - - # Perform extraction - self.log.debug("Performing extraction..") - - plugin.deselect_all() - - asset_group = instance.data["transientData"]["instance_node"] - - selected = [] - for obj in instance: - if isinstance(obj, bpy.types.Object): - obj.select_set(True) - selected.append(obj) - - context = plugin.create_blender_context( - active=asset_group, selected=selected) - - with bpy.context.temp_override(**context): - # We export the abc - bpy.ops.wm.alembic_export( - filepath=filepath, - selected=True, - flatten=False, - subdiv_schema=attr_values.get("subdiv_schema", False) - ) - - plugin.deselect_all() - - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': 'abc', - 'ext': 'abc', - 'files': filename, - "stagingDir": stagingdir, - } - instance.data["representations"].append(representation) - - self.log.debug("Extracted instance '%s' to: %s", - instance.name, representation) - - @classmethod - def get_attribute_defs(cls): - return [ - BoolDef( - "subdiv_schema", - label="Alembic Mesh Subdiv Schema", - tooltip="Export Meshes using Alembic's subdivision schema.\n" - "Enabling this includes creases with the export but " - "excludes the mesh's normals.\n" - "Enabling this usually result in smaller file size " - "due to lack of normals.", - default=False - ) - ] - - -class ExtractModelABC(ExtractABC): - """Extract model as ABC.""" - - label = "Extract Model ABC" - hosts = ["blender"] - families = ["model"] - optional = True diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/extract_abc_animation.py b/server_addon/blender/client/ayon_blender/plugins/publish/extract_abc_animation.py deleted file mode 100644 index 503593c8d3..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/extract_abc_animation.py +++ /dev/null @@ -1,80 +0,0 @@ -import os - -import bpy - -from ayon_core.pipeline import publish -from ayon_blender.api import plugin - - -class ExtractAnimationABC( - plugin.BlenderExtractor, - publish.OptionalPyblishPluginMixin, -): - """Extract as ABC.""" - - label = "Extract Animation ABC" - hosts = ["blender"] - families = ["animation"] - optional = True - - def process(self, instance): - if not self.is_active(instance.data): - return - - # Define extract output file path - stagingdir = self.staging_dir(instance) - folder_name = instance.data["folderEntity"]["name"] - product_name = instance.data["productName"] - instance_name = f"{folder_name}_{product_name}" - filename = 
f"{instance_name}.abc" - - filepath = os.path.join(stagingdir, filename) - - # Perform extraction - self.log.debug("Performing extraction..") - - plugin.deselect_all() - - selected = [] - asset_group = instance.data["transientData"]["instance_node"] - - objects = [] - for obj in instance: - if isinstance(obj, bpy.types.Collection): - for child in obj.all_objects: - objects.append(child) - for obj in objects: - children = [o for o in bpy.data.objects if o.parent == obj] - for child in children: - objects.append(child) - - for obj in objects: - obj.select_set(True) - selected.append(obj) - - context = plugin.create_blender_context( - active=asset_group, selected=selected) - - with bpy.context.temp_override(**context): - # We export the abc - bpy.ops.wm.alembic_export( - filepath=filepath, - selected=True, - flatten=False - ) - - plugin.deselect_all() - - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': 'abc', - 'ext': 'abc', - 'files': filename, - "stagingDir": stagingdir, - } - instance.data["representations"].append(representation) - - self.log.debug("Extracted instance '%s' to: %s", - instance.name, representation) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/extract_blend.py b/server_addon/blender/client/ayon_blender/plugins/publish/extract_blend.py deleted file mode 100644 index 520bc274a1..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/extract_blend.py +++ /dev/null @@ -1,76 +0,0 @@ -import os - -import bpy - -from ayon_core.pipeline import publish -from ayon_blender.api import plugin - - -class ExtractBlend( - plugin.BlenderExtractor, publish.OptionalPyblishPluginMixin -): - """Extract a blend file.""" - - label = "Extract Blend" - hosts = ["blender"] - families = ["model", "camera", "rig", "action", "layout", "blendScene"] - optional = True - - # From settings - compress = False - - def process(self, instance): - if not self.is_active(instance.data): - return - - # Define extract output file path - - stagingdir = self.staging_dir(instance) - folder_name = instance.data["folderEntity"]["name"] - product_name = instance.data["productName"] - instance_name = f"{folder_name}_{product_name}" - filename = f"{instance_name}.blend" - filepath = os.path.join(stagingdir, filename) - - # Perform extraction - self.log.debug("Performing extraction..") - - data_blocks = set() - - for data in instance: - data_blocks.add(data) - # Pack used images in the blend files. - if not ( - isinstance(data, bpy.types.Object) and data.type == 'MESH' - ): - continue - for material_slot in data.material_slots: - mat = material_slot.material - if not (mat and mat.use_nodes): - continue - tree = mat.node_tree - if tree.type != 'SHADER': - continue - for node in tree.nodes: - if node.bl_idname != 'ShaderNodeTexImage': - continue - # Check if image is not packed already - # and pack it if not. 
- if node.image and node.image.packed_file is None: - node.image.pack() - - bpy.data.libraries.write(filepath, data_blocks, compress=self.compress) - - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': 'blend', - 'ext': 'blend', - 'files': filename, - "stagingDir": stagingdir, - } - instance.data["representations"].append(representation) - - self.log.debug("Extracted instance '%s' to: %s", - instance.name, representation) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/extract_blend_animation.py b/server_addon/blender/client/ayon_blender/plugins/publish/extract_blend_animation.py deleted file mode 100644 index cca8ab2dd6..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/extract_blend_animation.py +++ /dev/null @@ -1,67 +0,0 @@ -import os - -import bpy - -from ayon_core.pipeline import publish -from ayon_blender.api import plugin - - -class ExtractBlendAnimation( - plugin.BlenderExtractor, - publish.OptionalPyblishPluginMixin, -): - """Extract a blend file.""" - - label = "Extract Blend" - hosts = ["blender"] - families = ["animation"] - optional = True - - # From settings - compress = False - - def process(self, instance): - if not self.is_active(instance.data): - return - - # Define extract output file path - - stagingdir = self.staging_dir(instance) - folder_name = instance.data["folderEntity"]["name"] - product_name = instance.data["productName"] - instance_name = f"{folder_name}_{product_name}" - filename = f"{instance_name}.blend" - filepath = os.path.join(stagingdir, filename) - - # Perform extraction - self.log.debug("Performing extraction..") - - data_blocks = set() - - for obj in instance: - if isinstance(obj, bpy.types.Object) and obj.type == 'EMPTY': - child = obj.children[0] - if child and child.type == 'ARMATURE': - if child.animation_data and child.animation_data.action: - if not obj.animation_data: - obj.animation_data_create() - obj.animation_data.action = child.animation_data.action - obj.animation_data_clear() - data_blocks.add(child.animation_data.action) - data_blocks.add(obj) - - bpy.data.libraries.write(filepath, data_blocks, compress=self.compress) - - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': 'blend', - 'ext': 'blend', - 'files': filename, - "stagingDir": stagingdir, - } - instance.data["representations"].append(representation) - - self.log.debug("Extracted instance '%s' to: %s", - instance.name, representation) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/extract_camera_abc.py b/server_addon/blender/client/ayon_blender/plugins/publish/extract_camera_abc.py deleted file mode 100644 index 278cd293c5..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/extract_camera_abc.py +++ /dev/null @@ -1,70 +0,0 @@ -import os - -import bpy - -from ayon_core.pipeline import publish -from ayon_blender.api import plugin - - -class ExtractCameraABC( - plugin.BlenderExtractor, publish.OptionalPyblishPluginMixin -): - """Extract camera as ABC.""" - - label = "Extract Camera (ABC)" - hosts = ["blender"] - families = ["camera"] - optional = True - - def process(self, instance): - if not self.is_active(instance.data): - return - - # Define extract output file path - stagingdir = self.staging_dir(instance) - folder_name = instance.data["folderEntity"]["name"] - product_name = instance.data["productName"] - instance_name = f"{folder_name}_{product_name}" - 
filename = f"{instance_name}.abc" - filepath = os.path.join(stagingdir, filename) - - # Perform extraction - self.log.debug("Performing extraction..") - - plugin.deselect_all() - - asset_group = instance.data["transientData"]["instance_node"] - - # Need to cast to list because children is a tuple - selected = list(asset_group.children) - active = selected[0] - - for obj in selected: - obj.select_set(True) - - context = plugin.create_blender_context( - active=active, selected=selected) - - with bpy.context.temp_override(**context): - # We export the abc - bpy.ops.wm.alembic_export( - filepath=filepath, - selected=True, - flatten=True - ) - - plugin.deselect_all() - - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': 'abc', - 'ext': 'abc', - 'files': filename, - "stagingDir": stagingdir, - } - instance.data["representations"].append(representation) - - self.log.debug("Extracted instance '%s' to: %s", - instance.name, representation) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/extract_camera_fbx.py b/server_addon/blender/client/ayon_blender/plugins/publish/extract_camera_fbx.py deleted file mode 100644 index 9094355a72..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/extract_camera_fbx.py +++ /dev/null @@ -1,85 +0,0 @@ -import os - -import bpy - -from ayon_core.pipeline import publish -from ayon_blender.api import plugin - - -class ExtractCamera( - plugin.BlenderExtractor, publish.OptionalPyblishPluginMixin -): - """Extract as the camera as FBX.""" - - label = "Extract Camera (FBX)" - hosts = ["blender"] - families = ["camera"] - optional = True - - def process(self, instance): - if not self.is_active(instance.data): - return - - # Define extract output file path - stagingdir = self.staging_dir(instance) - folder_name = instance.data["folderEntity"]["name"] - product_name = instance.data["productName"] - instance_name = f"{folder_name}_{product_name}" - filename = f"{instance_name}.fbx" - filepath = os.path.join(stagingdir, filename) - - # Perform extraction - self.log.debug("Performing extraction..") - - plugin.deselect_all() - - selected = [] - - camera = None - - for obj in instance: - if obj.type == "CAMERA": - obj.select_set(True) - selected.append(obj) - camera = obj - break - - assert camera, "No camera found" - - context = plugin.create_blender_context( - active=camera, selected=selected) - - scale_length = bpy.context.scene.unit_settings.scale_length - bpy.context.scene.unit_settings.scale_length = 0.01 - - with bpy.context.temp_override(**context): - # We export the fbx - bpy.ops.export_scene.fbx( - filepath=filepath, - use_active_collection=False, - use_selection=True, - bake_anim_use_nla_strips=False, - bake_anim_use_all_actions=False, - add_leaf_bones=False, - armature_nodetype='ROOT', - object_types={'CAMERA'}, - bake_anim_simplify_factor=0.0 - ) - - bpy.context.scene.unit_settings.scale_length = scale_length - - plugin.deselect_all() - - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': 'fbx', - 'ext': 'fbx', - 'files': filename, - "stagingDir": stagingdir, - } - instance.data["representations"].append(representation) - - self.log.debug("Extracted instance '%s' to: %s", - instance.name, representation) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/extract_fbx.py b/server_addon/blender/client/ayon_blender/plugins/publish/extract_fbx.py deleted file mode 100644 index 
085f7b18c3..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/extract_fbx.py +++ /dev/null @@ -1,93 +0,0 @@ -import os - -import bpy - -from ayon_core.pipeline import publish -from ayon_blender.api import plugin - - -class ExtractFBX( - plugin.BlenderExtractor, publish.OptionalPyblishPluginMixin -): - """Extract as FBX.""" - - label = "Extract FBX" - hosts = ["blender"] - families = ["model", "rig"] - optional = True - - def process(self, instance): - if not self.is_active(instance.data): - return - - # Define extract output file path - stagingdir = self.staging_dir(instance) - folder_name = instance.data["folderEntity"]["name"] - product_name = instance.data["productName"] - instance_name = f"{folder_name}_{product_name}" - filename = f"{instance_name}.fbx" - filepath = os.path.join(stagingdir, filename) - - # Perform extraction - self.log.debug("Performing extraction..") - - plugin.deselect_all() - - asset_group = instance.data["transientData"]["instance_node"] - - selected = [] - for obj in instance: - obj.select_set(True) - selected.append(obj) - - context = plugin.create_blender_context( - active=asset_group, selected=selected) - - new_materials = [] - new_materials_objs = [] - objects = list(asset_group.children) - - for obj in objects: - objects.extend(obj.children) - if obj.type == 'MESH' and len(obj.data.materials) == 0: - mat = bpy.data.materials.new(obj.name) - obj.data.materials.append(mat) - new_materials.append(mat) - new_materials_objs.append(obj) - - scale_length = bpy.context.scene.unit_settings.scale_length - bpy.context.scene.unit_settings.scale_length = 0.01 - - with bpy.context.temp_override(**context): - # We export the fbx - bpy.ops.export_scene.fbx( - filepath=filepath, - use_active_collection=False, - use_selection=True, - mesh_smooth_type='FACE', - add_leaf_bones=False - ) - - bpy.context.scene.unit_settings.scale_length = scale_length - - plugin.deselect_all() - - for mat in new_materials: - bpy.data.materials.remove(mat) - - for obj in new_materials_objs: - obj.data.materials.pop() - - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': 'fbx', - 'ext': 'fbx', - 'files': filename, - "stagingDir": stagingdir, - } - instance.data["representations"].append(representation) - - self.log.debug("Extracted instance '%s' to: %s", - instance.name, representation) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/extract_fbx_animation.py b/server_addon/blender/client/ayon_blender/plugins/publish/extract_fbx_animation.py deleted file mode 100644 index 7f49e919db..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/extract_fbx_animation.py +++ /dev/null @@ -1,227 +0,0 @@ -import os -import json - -import bpy -import bpy_extras -import bpy_extras.anim_utils - -from ayon_core.pipeline import publish -from ayon_blender.api import plugin -from ayon_blender.api.pipeline import AVALON_PROPERTY - - -def get_all_parents(obj): - """Get all recursive parents of object""" - result = [] - while True: - obj = obj.parent - if not obj: - break - result.append(obj) - return result - - -def get_highest_root(objects): - # Get the highest object that is also in the collection - included_objects = {obj.name_full for obj in objects} - num_parents_to_obj = {} - for obj in objects: - if isinstance(obj, bpy.types.Object): - parents = get_all_parents(obj) - # included parents - parents = [parent for parent in parents if - parent.name_full in included_objects] - if not 
parents: - # A node without parents must be a highest root - return obj - - num_parents_to_obj.setdefault(len(parents), obj) - - minimum_parent = min(num_parents_to_obj) - return num_parents_to_obj[minimum_parent] - - -class ExtractAnimationFBX( - plugin.BlenderExtractor, - publish.OptionalPyblishPluginMixin, -): - """Extract as animation.""" - - label = "Extract FBX" - hosts = ["blender"] - families = ["animation"] - optional = True - - def process(self, instance): - if not self.is_active(instance.data): - return - - # Define extract output file path - stagingdir = self.staging_dir(instance) - - # Perform extraction - self.log.debug("Performing extraction..") - - asset_group = instance.data["transientData"]["instance_node"] - - # Get objects in this collection (but not in children collections) - # and for those objects include the children hierarchy - # TODO: Would it make more sense for the Collect Instance collector - # to also always retrieve all the children? - objects = set(asset_group.objects) - - # From the direct children of the collection find the 'root' node - # that we want to export - it is the 'highest' node in a hierarchy - root = get_highest_root(objects) - - for obj in list(objects): - objects.update(obj.children_recursive) - - # Find all armatures among the objects, assume to find only one - armatures = [obj for obj in objects if obj.type == "ARMATURE"] - if not armatures: - raise RuntimeError( - f"Unable to find ARMATURE in collection: " - f"{asset_group.name}" - ) - elif len(armatures) > 1: - self.log.warning( - "Found more than one ARMATURE, using " - f"only first of: {armatures}" - ) - armature = armatures[0] - - object_action_pairs = [] - original_actions = [] - - starting_frames = [] - ending_frames = [] - - # For each armature, we make a copy of the current action - if armature.animation_data and armature.animation_data.action: - curr_action = armature.animation_data.action - copy_action = curr_action.copy() - - curr_frame_range = curr_action.frame_range - - starting_frames.append(curr_frame_range[0]) - ending_frames.append(curr_frame_range[1]) - else: - self.log.info( - f"Armature '{armature.name}' has no animation, " - f"skipping FBX animation extraction for {instance}." 
- ) - return - - asset_group_name = asset_group.name - asset_name = asset_group.get(AVALON_PROPERTY).get("asset_name") - if asset_name: - # Rename for the export; this data is only present when loaded - # from a JSON Layout (layout family) - asset_group.name = asset_name - - # Remove : from the armature name for the export - armature_name = armature.name - original_name = armature_name.split(':')[1] - armature.name = original_name - - object_action_pairs.append((armature, copy_action)) - original_actions.append(curr_action) - - # We compute the starting and ending frames - max_frame = min(starting_frames) - min_frame = max(ending_frames) - - # We bake the copy of the current action for each object - bpy_extras.anim_utils.bake_action_objects( - object_action_pairs, - frames=range(int(min_frame), int(max_frame)), - do_object=False, - do_clean=False - ) - - for obj in bpy.data.objects: - obj.select_set(False) - - root.select_set(True) - armature.select_set(True) - folder_name = instance.data["folderEntity"]["name"] - product_name = instance.data["productName"] - instance_name = f"{folder_name}_{product_name}" - fbx_filename = f"{instance_name}_{armature.name}.fbx" - filepath = os.path.join(stagingdir, fbx_filename) - - override = plugin.create_blender_context( - active=root, selected=[root, armature]) - - with bpy.context.temp_override(**override): - # We export the fbx - bpy.ops.export_scene.fbx( - filepath=filepath, - use_active_collection=False, - use_selection=True, - bake_anim_use_nla_strips=False, - bake_anim_use_all_actions=False, - add_leaf_bones=False, - armature_nodetype='ROOT', - object_types={'EMPTY', 'ARMATURE'} - ) - - armature.name = armature_name - asset_group.name = asset_group_name - root.select_set(True) - armature.select_set(False) - - # We delete the baked action and set the original one back - for i in range(0, len(object_action_pairs)): - pair = object_action_pairs[i] - action = original_actions[i] - - if action: - pair[0].animation_data.action = action - - if pair[1]: - pair[1].user_clear() - bpy.data.actions.remove(pair[1]) - - json_filename = f"{instance_name}.json" - json_path = os.path.join(stagingdir, json_filename) - - json_dict = { - "instance_name": asset_group.get(AVALON_PROPERTY).get("objectName") - } - - # collection = instance.data.get("name") - # container = None - # for obj in bpy.data.collections[collection].objects: - # if obj.type == "ARMATURE": - # container_name = obj.get("avalon").get("container_name") - # container = bpy.data.collections[container_name] - # if container: - # json_dict = { - # "instance_name": container.get("avalon").get("instance_name") - # } - - with open(json_path, "w+") as file: - json.dump(json_dict, fp=file, indent=2) - - if "representations" not in instance.data: - instance.data["representations"] = [] - - fbx_representation = { - 'name': 'fbx', - 'ext': 'fbx', - 'files': fbx_filename, - "stagingDir": stagingdir, - } - json_representation = { - 'name': 'json', - 'ext': 'json', - 'files': json_filename, - "stagingDir": stagingdir, - } - instance.data["representations"].append(fbx_representation) - instance.data["representations"].append(json_representation) - - self.log.debug("Extracted instance '{}' to: {}".format( - instance.name, fbx_representation)) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/extract_layout.py b/server_addon/blender/client/ayon_blender/plugins/publish/extract_layout.py deleted file mode 100644 index 0732d29c9d..0000000000 --- 
a/server_addon/blender/client/ayon_blender/plugins/publish/extract_layout.py +++ /dev/null @@ -1,279 +0,0 @@ -import os -import json - -import bpy -import bpy_extras -import bpy_extras.anim_utils - -from ayon_api import get_representations - -from ayon_core.pipeline import publish -from ayon_blender.api import plugin -from ayon_blender.api.pipeline import AVALON_PROPERTY - - -class ExtractLayout( - plugin.BlenderExtractor, publish.OptionalPyblishPluginMixin -): - """Extract a layout.""" - - label = "Extract Layout (JSON)" - hosts = ["blender"] - families = ["layout"] - optional = True - - def _export_animation(self, asset, instance, stagingdir, fbx_count): - n = fbx_count - - for obj in asset.children: - if obj.type != "ARMATURE": - continue - - object_action_pairs = [] - original_actions = [] - - starting_frames = [] - ending_frames = [] - - # For each armature, we make a copy of the current action - curr_action = None - copy_action = None - - if obj.animation_data and obj.animation_data.action: - curr_action = obj.animation_data.action - copy_action = curr_action.copy() - - curr_frame_range = curr_action.frame_range - - starting_frames.append(curr_frame_range[0]) - ending_frames.append(curr_frame_range[1]) - else: - self.log.info("Object has no animation.") - continue - - asset_group_name = asset.name - asset.name = asset.get(AVALON_PROPERTY).get("asset_name") - - armature_name = obj.name - original_name = armature_name.split(':')[1] - obj.name = original_name - - object_action_pairs.append((obj, copy_action)) - original_actions.append(curr_action) - - # We compute the starting and ending frames - max_frame = min(starting_frames) - min_frame = max(ending_frames) - - # We bake the copy of the current action for each object - bpy_extras.anim_utils.bake_action_objects( - object_action_pairs, - frames=range(int(min_frame), int(max_frame)), - do_object=False, - do_clean=False - ) - - for o in bpy.data.objects: - o.select_set(False) - - asset.select_set(True) - obj.select_set(True) - fbx_filename = f"{n:03d}.fbx" - filepath = os.path.join(stagingdir, fbx_filename) - - override = plugin.create_blender_context( - active=asset, selected=[asset, obj]) - with bpy.context.temp_override(**override): - # We export the fbx - bpy.ops.export_scene.fbx( - filepath=filepath, - use_active_collection=False, - use_selection=True, - bake_anim_use_nla_strips=False, - bake_anim_use_all_actions=False, - add_leaf_bones=False, - armature_nodetype='ROOT', - object_types={'EMPTY', 'ARMATURE'} - ) - obj.name = armature_name - asset.name = asset_group_name - asset.select_set(False) - obj.select_set(False) - - # We delete the baked action and set the original one back - for i in range(0, len(object_action_pairs)): - pair = object_action_pairs[i] - action = original_actions[i] - - if action: - pair[0].animation_data.action = action - - if pair[1]: - pair[1].user_clear() - bpy.data.actions.remove(pair[1]) - - return fbx_filename, n + 1 - - return None, n - - def process(self, instance): - if not self.is_active(instance.data): - return - - # Define extract output file path - stagingdir = self.staging_dir(instance) - - # Perform extraction - self.log.debug("Performing extraction..") - - if "representations" not in instance.data: - instance.data["representations"] = [] - - json_data = [] - fbx_files = [] - - asset_group = instance.data["transientData"]["instance_node"] - - fbx_count = 0 - - project_name = instance.context.data["projectName"] - version_ids = set() - filtered_assets = [] - for asset in asset_group.children: - 
metadata = asset.get(AVALON_PROPERTY) - if not metadata: - # Avoid raising error directly if there's just invalid data - # inside the instance; better to log it to the artist - # TODO: This should actually be validated in a validator - self.log.warning( - f"Found content in layout that is not a loaded " - f"asset, skipping: {asset.name_full}" - ) - continue - - filtered_assets.append((asset, metadata)) - version_ids.add(metadata["parent"]) - - repre_entities = get_representations( - project_name, - representation_names={"blend", "fbx", "abc"}, - version_ids=version_ids, - fields={"id", "versionId", "name"} - ) - repre_mapping_by_version_id = { - version_id: {} - for version_id in version_ids - } - for repre_entity in repre_entities: - version_id = repre_entity["versionId"] - repre_mapping_by_version_id[version_id][repre_entity["name"]] = ( - repre_entity - ) - - for asset, metadata in filtered_assets: - version_id = metadata["parent"] - product_type = metadata.get("product_type") - if product_type is None: - product_type = metadata["family"] - - repres_by_name = repre_mapping_by_version_id[version_id] - - self.log.debug("Parent: {}".format(version_id)) - # Get blend, fbx and abc reference - blend_id = repres_by_name.get("blend", {}).get("id") - fbx_id = repres_by_name.get("fbx", {}).get("id") - abc_id = repres_by_name.get("abc", {}).get("id") - json_element = { - key: value - for key, value in ( - ("reference", blend_id), - ("reference_fbx", fbx_id), - ("reference_abc", abc_id), - ) - if value - } - json_element["product_type"] = product_type - json_element["instance_name"] = asset.name - json_element["asset_name"] = metadata["asset_name"] - json_element["file_path"] = metadata["libpath"] - - json_element["transform"] = { - "translation": { - "x": asset.location.x, - "y": asset.location.y, - "z": asset.location.z - }, - "rotation": { - "x": asset.rotation_euler.x, - "y": asset.rotation_euler.y, - "z": asset.rotation_euler.z - }, - "scale": { - "x": asset.scale.x, - "y": asset.scale.y, - "z": asset.scale.z - } - } - - json_element["transform_matrix"] = [] - - for row in list(asset.matrix_world.transposed()): - json_element["transform_matrix"].append(list(row)) - - json_element["basis"] = [ - [1, 0, 0, 0], - [0, -1, 0, 0], - [0, 0, 1, 0], - [0, 0, 0, 1] - ] - - # Extract the animation as well - if product_type == "rig": - f, n = self._export_animation( - asset, instance, stagingdir, fbx_count) - if f: - fbx_files.append(f) - json_element["animation"] = f - fbx_count = n - - json_data.append(json_element) - - folder_name = instance.data["folderEntity"]["name"] - product_name = instance.data["productName"] - instance_name = f"{folder_name}_{product_name}" - json_filename = f"{instance_name}.json" - - json_path = os.path.join(stagingdir, json_filename) - - with open(json_path, "w+") as file: - json.dump(json_data, fp=file, indent=2) - - json_representation = { - 'name': 'json', - 'ext': 'json', - 'files': json_filename, - "stagingDir": stagingdir, - } - instance.data["representations"].append(json_representation) - - self.log.debug(fbx_files) - - if len(fbx_files) == 1: - fbx_representation = { - 'name': 'fbx', - 'ext': '000.fbx', - 'files': fbx_files[0], - "stagingDir": stagingdir, - } - instance.data["representations"].append(fbx_representation) - elif len(fbx_files) > 1: - fbx_representation = { - 'name': 'fbx', - 'ext': 'fbx', - 'files': fbx_files, - "stagingDir": stagingdir, - } - instance.data["representations"].append(fbx_representation) - - self.log.debug("Extracted instance '%s' to: 
%s", - instance.name, json_representation) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/extract_playblast.py b/server_addon/blender/client/ayon_blender/plugins/publish/extract_playblast.py deleted file mode 100644 index 0f769c296d..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/extract_playblast.py +++ /dev/null @@ -1,129 +0,0 @@ -import os -import json - -import clique -import pyblish.api - -import bpy - -from ayon_core.pipeline import publish -from ayon_blender.api import capture, plugin -from ayon_blender.api.lib import maintained_time - - -class ExtractPlayblast( - plugin.BlenderExtractor, publish.OptionalPyblishPluginMixin -): - """ - Extract viewport playblast. - - Takes review camera and creates review Quicktime video based on viewport - capture. - """ - - label = "Extract Playblast" - hosts = ["blender"] - families = ["review"] - optional = True - order = pyblish.api.ExtractorOrder + 0.01 - - presets = "{}" - - def process(self, instance): - if not self.is_active(instance.data): - return - - # get scene fps - fps = instance.data.get("fps") - if fps is None: - fps = bpy.context.scene.render.fps - instance.data["fps"] = fps - - self.log.debug(f"fps: {fps}") - - # If start and end frames cannot be determined, - # get them from Blender timeline. - start = instance.data.get("frameStart", bpy.context.scene.frame_start) - end = instance.data.get("frameEnd", bpy.context.scene.frame_end) - - self.log.debug(f"start: {start}, end: {end}") - assert end > start, "Invalid time range !" - - # get cameras - camera = instance.data("review_camera", None) - - # get isolate objects list - isolate = instance.data("isolate", None) - - # get output path - stagingdir = self.staging_dir(instance) - folder_name = instance.data["folderEntity"]["name"] - product_name = instance.data["productName"] - filename = f"{folder_name}_{product_name}" - - path = os.path.join(stagingdir, filename) - - self.log.debug(f"Outputting images to {path}") - - presets = json.loads(self.presets) - preset = presets.get("default") - preset.update({ - "camera": camera, - "start_frame": start, - "end_frame": end, - "filename": path, - "overwrite": True, - "isolate": isolate, - }) - preset.setdefault( - "image_settings", - { - "file_format": "PNG", - "color_mode": "RGB", - "color_depth": "8", - "compression": 15, - }, - ) - - with maintained_time(): - path = capture(**preset) - - self.log.debug(f"playblast path {path}") - - collected_files = os.listdir(stagingdir) - collections, remainder = clique.assemble( - collected_files, - patterns=[f"{filename}\\.{clique.DIGITS_PATTERN}\\.png$"], - ) - - if len(collections) > 1: - raise RuntimeError( - f"More than one collection found in stagingdir: {stagingdir}" - ) - elif len(collections) == 0: - raise RuntimeError( - f"No collection found in stagingdir: {stagingdir}" - ) - - frame_collection = collections[0] - - self.log.debug(f"Found collection of interest {frame_collection}") - - instance.data.setdefault("representations", []) - - tags = ["review"] - if not instance.data.get("keepImages"): - tags.append("delete") - - representation = { - "name": "png", - "ext": "png", - "files": list(frame_collection), - "stagingDir": stagingdir, - "frameStart": start, - "frameEnd": end, - "fps": fps, - "tags": tags, - "camera_name": camera - } - instance.data["representations"].append(representation) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/extract_thumbnail.py 
b/server_addon/blender/client/ayon_blender/plugins/publish/extract_thumbnail.py deleted file mode 100644 index e3bce8bf73..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/extract_thumbnail.py +++ /dev/null @@ -1,107 +0,0 @@ -import os -import glob -import json - -import pyblish.api -from ayon_blender.api import capture, plugin -from ayon_blender.api.lib import maintained_time - -import bpy - - -class ExtractThumbnail(plugin.BlenderExtractor): - """Extract viewport thumbnail. - - Takes review camera and creates a thumbnail based on viewport - capture. - - """ - - label = "Extract Thumbnail" - hosts = ["blender"] - families = ["review"] - order = pyblish.api.ExtractorOrder + 0.01 - presets = "{}" - - def process(self, instance): - self.log.debug("Extracting capture..") - - if instance.data.get("thumbnailSource"): - self.log.debug("Thumbnail source found, skipping...") - return - - stagingdir = self.staging_dir(instance) - folder_name = instance.data["folderEntity"]["name"] - product_name = instance.data["productName"] - filename = f"{folder_name}_{product_name}" - - path = os.path.join(stagingdir, filename) - - self.log.debug(f"Outputting images to {path}") - - camera = instance.data.get("review_camera", "AUTO") - start = instance.data.get("frameStart", bpy.context.scene.frame_start) - product_type = instance.data["productType"] - isolate = instance.data("isolate", None) - - presets = json.loads(self.presets) - preset = presets.get(product_type, {}) - - preset.update({ - "camera": camera, - "start_frame": start, - "end_frame": start, - "filename": path, - "overwrite": True, - "isolate": isolate, - }) - preset.setdefault( - "image_settings", - { - "file_format": "JPEG", - "color_mode": "RGB", - "quality": 100, - }, - ) - - with maintained_time(): - path = capture(**preset) - - thumbnail = os.path.basename(self._fix_output_path(path)) - - self.log.debug(f"thumbnail: {thumbnail}") - - instance.data.setdefault("representations", []) - - representation = { - "name": "thumbnail", - "ext": "jpg", - "files": thumbnail, - "stagingDir": stagingdir, - "thumbnail": True - } - instance.data["representations"].append(representation) - - def _fix_output_path(self, filepath): - """Workaround to return correct filepath. - - To workaround this we just glob.glob() for any file extensions and - assume the latest modified file is the correct file and return it. - - """ - # Catch cancelled playblast - if filepath is None: - self.log.warning( - "Playblast did not result in output path. " - "Playblast is probably interrupted." - ) - return None - - if not os.path.exists(filepath): - files = glob.glob(f"{filepath}.*.jpg") - - if not files: - raise RuntimeError(f"Couldn't find playblast from: {filepath}") - filepath = max(files, key=os.path.getmtime) - - return filepath diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/extract_usd.py b/server_addon/blender/client/ayon_blender/plugins/publish/extract_usd.py deleted file mode 100644 index 7ea89ae3dc..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/extract_usd.py +++ /dev/null @@ -1,90 +0,0 @@ -import os - -import bpy - -from ayon_core.pipeline import KnownPublishError -from ayon_blender.api import plugin, lib - - -class ExtractUSD(plugin.BlenderExtractor): - """Extract as USD.""" - - label = "Extract USD" - hosts = ["blender"] - families = ["usd"] - - def process(self, instance): - - # Ignore runtime instances (e.g. 
USD layers) - # TODO: This is better done via more specific `families` - if not instance.data.get("transientData", {}).get("instance_node"): - return - - # Define extract output file path - stagingdir = self.staging_dir(instance) - filename = f"{instance.name}.usd" - filepath = os.path.join(stagingdir, filename) - - # Perform extraction - self.log.debug("Performing extraction..") - - # Select all members to "export selected" - plugin.deselect_all() - - selected = [] - for obj in instance: - if isinstance(obj, bpy.types.Object): - obj.select_set(True) - selected.append(obj) - - root = lib.get_highest_root(objects=instance[:]) - if not root: - instance_node = instance.data["transientData"]["instance_node"] - raise KnownPublishError( - f"No root object found in instance: {instance_node.name}" - ) - self.log.debug(f"Exporting using active root: {root.name}") - - context = plugin.create_blender_context( - active=root, selected=selected) - - # Export USD - with bpy.context.temp_override(**context): - bpy.ops.wm.usd_export( - filepath=filepath, - selected_objects_only=True, - export_textures=False, - relative_paths=False, - export_animation=False, - export_hair=False, - export_uvmaps=True, - # TODO: add for new version of Blender (4+?) - # export_mesh_colors=True, - export_normals=True, - export_materials=True, - use_instancing=True - ) - - plugin.deselect_all() - - # Add representation - representation = { - 'name': 'usd', - 'ext': 'usd', - 'files': filename, - "stagingDir": stagingdir, - } - instance.data.setdefault("representations", []).append(representation) - self.log.debug("Extracted instance '%s' to: %s", - instance.name, representation) - - -class ExtractModelUSD(ExtractUSD): - """Extract model as USD.""" - - label = "Extract USD (Model)" - hosts = ["blender"] - families = ["model"] - - # Driven by settings - optional = True diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/increment_workfile_version.py b/server_addon/blender/client/ayon_blender/plugins/publish/increment_workfile_version.py deleted file mode 100644 index 50d16ea54a..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/increment_workfile_version.py +++ /dev/null @@ -1,33 +0,0 @@ -import pyblish.api -from ayon_core.pipeline.publish import OptionalPyblishPluginMixin -from ayon_blender.api.workio import save_file -from ayon_blender.api import plugin - - -class IncrementWorkfileVersion( - plugin.BlenderContextPlugin, - OptionalPyblishPluginMixin -): - """Increment current workfile version.""" - - order = pyblish.api.IntegratorOrder + 0.9 - label = "Increment Workfile Version" - optional = True - hosts = ["blender"] - families = ["animation", "model", "rig", "action", "layout", "blendScene", - "pointcache", "render.farm"] - - def process(self, context): - if not self.is_active(context.data): - return - - assert all(result["success"] for result in context.data["results"]), ( - "Publishing not successful so version is not increased.") - - from ayon_core.lib import version_up - path = context.data["currentFile"] - filepath = version_up(path) - - save_file(filepath, copy=False) - - self.log.debug('Incrementing blender workfile version') diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/integrate_animation.py b/server_addon/blender/client/ayon_blender/plugins/publish/integrate_animation.py deleted file mode 100644 index b95c280ab0..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/integrate_animation.py +++ /dev/null @@ -1,54 +0,0 @@ -import json - 
-import pyblish.api -from ayon_core.pipeline.publish import OptionalPyblishPluginMixin -from ayon_blender.api import plugin - - -class IntegrateAnimation( - plugin.BlenderInstancePlugin, - OptionalPyblishPluginMixin, -): - """Generate a JSON file for animation.""" - - label = "Integrate Animation" - order = pyblish.api.IntegratorOrder + 0.1 - optional = True - hosts = ["blender"] - families = ["setdress"] - - def process(self, instance): - self.log.debug("Integrate Animation") - - representation = instance.data.get('representations')[0] - json_path = representation.get('publishedFiles')[0] - - with open(json_path, "r") as file: - data = json.load(file) - - # Update the json file for the setdress to add the published - # representations of the animations - for json_dict in data: - json_product_name = json_dict["productName"] - i = None - for elem in instance.context: - if elem.data["productName"] == json_product_name: - i = elem - break - if not i: - continue - rep = None - pub_repr = i.data["published_representations"] - for elem in pub_repr: - if pub_repr[elem]["representation"]["name"] == "fbx": - rep = pub_repr[elem] - break - if not rep: - continue - obj_id = rep["representation"]["id"] - - if obj_id: - json_dict["representation_id"] = str(obj_id) - - with open(json_path, "w") as file: - json.dump(data, fp=file, indent=2) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/validate_camera_zero_keyframe.py b/server_addon/blender/client/ayon_blender/plugins/publish/validate_camera_zero_keyframe.py deleted file mode 100644 index df66f71dc5..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/validate_camera_zero_keyframe.py +++ /dev/null @@ -1,57 +0,0 @@ -from typing import List - -import bpy - -import ayon_blender.api.action -from ayon_blender.api import plugin -from ayon_core.pipeline.publish import ( - ValidateContentsOrder, - PublishValidationError, - OptionalPyblishPluginMixin -) - - -class ValidateCameraZeroKeyframe( - plugin.BlenderInstancePlugin, - OptionalPyblishPluginMixin -): - """Camera must have a keyframe at frame 0. - - Unreal shifts the first keyframe to frame 0. Forcing the camera to have - a keyframe at frame 0 will ensure that the animation will be the same - in Unreal and Blender. 
- """ - - order = ValidateContentsOrder - hosts = ["blender"] - families = ["camera"] - label = "Zero Keyframe" - actions = [ayon_blender.api.action.SelectInvalidAction] - - @staticmethod - def get_invalid(instance) -> List: - invalid = [] - for obj in instance: - if isinstance(obj, bpy.types.Object) and obj.type == "CAMERA": - if obj.animation_data and obj.animation_data.action: - action = obj.animation_data.action - frames_set = set() - for fcu in action.fcurves: - for kp in fcu.keyframe_points: - frames_set.add(kp.co[0]) - frames = list(frames_set) - frames.sort() - if frames[0] != 0.0: - invalid.append(obj) - return invalid - - def process(self, instance): - if not self.is_active(instance.data): - return - - invalid = self.get_invalid(instance) - if invalid: - names = ", ".join(obj.name for obj in invalid) - raise PublishValidationError( - f"Camera must have a keyframe at frame 0: {names}" - ) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/validate_deadline_publish.py b/server_addon/blender/client/ayon_blender/plugins/publish/validate_deadline_publish.py deleted file mode 100644 index fe544ee310..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/validate_deadline_publish.py +++ /dev/null @@ -1,61 +0,0 @@ -import os - -import bpy - -from ayon_core.pipeline.publish import ( - RepairAction, - ValidateContentsOrder, - PublishValidationError, - OptionalPyblishPluginMixin -) -from ayon_blender.api import plugin -from ayon_blender.api.render_lib import prepare_rendering - - -class ValidateDeadlinePublish( - plugin.BlenderInstancePlugin, - OptionalPyblishPluginMixin -): - """Validates Render File Directory is - not the same in every submission - """ - - order = ValidateContentsOrder - families = ["render"] - hosts = ["blender"] - label = "Validate Render Output for Deadline" - optional = True - actions = [RepairAction] - - def process(self, instance): - if not self.is_active(instance.data): - return - - tree = bpy.context.scene.node_tree - output_type = "CompositorNodeOutputFile" - output_node = None - # Remove all output nodes that include "AYON" in the name. - # There should be only one. - for node in tree.nodes: - if node.bl_idname == output_type and "AYON" in node.name: - output_node = node - break - if not output_node: - raise PublishValidationError( - "No output node found in the compositor tree." - ) - filepath = bpy.data.filepath - file = os.path.basename(filepath) - filename, ext = os.path.splitext(file) - if filename not in output_node.base_path: - raise PublishValidationError( - "Render output folder doesn't match the blender scene name! " - "Use Repair action to fix the folder file path." 
- ) - - @classmethod - def repair(cls, instance): - container = instance.data["transientData"]["instance_node"] - prepare_rendering(container) - bpy.ops.wm.save_as_mainfile(filepath=bpy.data.filepath) - cls.log.debug("Reset the render output folder...") diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/validate_file_saved.py b/server_addon/blender/client/ayon_blender/plugins/publish/validate_file_saved.py deleted file mode 100644 index e6b7b01c71..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/validate_file_saved.py +++ /dev/null @@ -1,66 +0,0 @@ -import bpy - -import pyblish.api - -from ayon_core.pipeline.publish import ( - OptionalPyblishPluginMixin, - PublishValidationError -) -from ayon_blender.api import plugin - - -class SaveWorkfileAction(pyblish.api.Action): - """Save Workfile.""" - label = "Save Workfile" - on = "failed" - icon = "save" - - def process(self, context, plugin): - bpy.ops.wm.avalon_workfiles() - - -class ValidateFileSaved( - plugin.BlenderContextPlugin, - OptionalPyblishPluginMixin -): - """Validate that the workfile has been saved.""" - - order = pyblish.api.ValidatorOrder - 0.01 - hosts = ["blender"] - label = "Validate File Saved" - optional = False - # TODO rename to 'exclude_product_types' - exclude_families = [] - actions = [SaveWorkfileAction] - - def process(self, context): - if not self.is_active(context.data): - return - - if not context.data["currentFile"]: - # File has not been saved at all and has no filename - raise PublishValidationError( - "Current workfile has not been saved yet.\n" - "Save the workfile before continuing." - ) - - # Do not validate workfile has unsaved changes if only instances - # present of families that should be excluded - product_types = { - instance.data["productType"] for instance in context - # Consider only enabled instances - if instance.data.get("publish", True) - and instance.data.get("active", True) - } - - def is_excluded(family): - return any(family in exclude_family - for exclude_family in self.exclude_families) - - if all(is_excluded(product_type) for product_type in product_types): - self.log.debug("Only excluded families found, skipping workfile " - "unsaved changes validation..") - return - - if bpy.data.is_dirty: - raise PublishValidationError("Workfile has unsaved changes.") diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/validate_instance_empty.py b/server_addon/blender/client/ayon_blender/plugins/publish/validate_instance_empty.py deleted file mode 100644 index 9561cc7020..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/validate_instance_empty.py +++ /dev/null @@ -1,20 +0,0 @@ -import pyblish.api -from ayon_core.pipeline.publish import PublishValidationError -from ayon_blender.api import plugin - - -class ValidateInstanceEmpty(plugin.BlenderInstancePlugin): - """Validator to verify that the instance is not empty""" - - order = pyblish.api.ValidatorOrder - 0.01 - hosts = ["blender"] - families = ["model", "pointcache", "rig", "camera" "layout", "blendScene"] - label = "Validate Instance is not Empty" - optional = False - - def process(self, instance): - # Members are collected by `collect_instance` so we only need to check - # whether any member is included. 
The instance node will be included - # as a member as well, hence we will check for at least 2 members - if len(instance) < 2: - raise PublishValidationError(f"Instance {instance.name} is empty.") diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/validate_mesh_has_uv.py b/server_addon/blender/client/ayon_blender/plugins/publish/validate_mesh_has_uv.py deleted file mode 100644 index 3dd49e0e45..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/validate_mesh_has_uv.py +++ /dev/null @@ -1,65 +0,0 @@ -from typing import List - -import bpy - -from ayon_core.pipeline.publish import ( - ValidateContentsOrder, - OptionalPyblishPluginMixin, - PublishValidationError -) -import ayon_blender.api.action -from ayon_blender.api import plugin - - -class ValidateMeshHasUvs( - plugin.BlenderInstancePlugin, - OptionalPyblishPluginMixin, -): - """Validate that the current mesh has UV's.""" - - order = ValidateContentsOrder - hosts = ["blender"] - families = ["model"] - label = "Mesh Has UVs" - actions = [ayon_blender.api.action.SelectInvalidAction] - optional = True - - @staticmethod - def has_uvs(obj: bpy.types.Object) -> bool: - """Check if an object has uv's.""" - if not obj.data.uv_layers: - return False - for uv_layer in obj.data.uv_layers: - for polygon in obj.data.polygons: - for loop_index in polygon.loop_indices: - if ( - loop_index >= len(uv_layer.data) - or not uv_layer.data[loop_index].uv - ): - return False - - return True - - @classmethod - def get_invalid(cls, instance) -> List: - invalid = [] - for obj in instance: - if isinstance(obj, bpy.types.Object) and obj.type == 'MESH': - if obj.mode != "OBJECT": - cls.log.warning( - f"Mesh object {obj.name} should be in 'OBJECT' mode" - " to be properly checked." - ) - if not cls.has_uvs(obj): - invalid.append(obj) - return invalid - - def process(self, instance): - if not self.is_active(instance.data): - return - - invalid = self.get_invalid(instance) - if invalid: - raise PublishValidationError( - f"Meshes found in instance without valid UV's: {invalid}" - ) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/validate_mesh_no_negative_scale.py b/server_addon/blender/client/ayon_blender/plugins/publish/validate_mesh_no_negative_scale.py deleted file mode 100644 index 91de310e46..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/validate_mesh_no_negative_scale.py +++ /dev/null @@ -1,44 +0,0 @@ -from typing import List - -import bpy - -from ayon_core.pipeline.publish import ( - ValidateContentsOrder, - OptionalPyblishPluginMixin, - PublishValidationError -) -import ayon_blender.api.action -from ayon_blender.api import plugin - - -class ValidateMeshNoNegativeScale( - plugin.BlenderInstancePlugin, - OptionalPyblishPluginMixin -): - """Ensure that meshes don't have a negative scale.""" - - order = ValidateContentsOrder - hosts = ["blender"] - families = ["model"] - label = "Mesh No Negative Scale" - actions = [ayon_blender.api.action.SelectInvalidAction] - - @staticmethod - def get_invalid(instance) -> List: - invalid = [] - for obj in instance: - if isinstance(obj, bpy.types.Object) and obj.type == 'MESH': - if any(v < 0 for v in obj.scale): - invalid.append(obj) - return invalid - - def process(self, instance): - if not self.is_active(instance.data): - return - - invalid = self.get_invalid(instance) - if invalid: - names = ", ".join(obj.name for obj in invalid) - raise PublishValidationError( - f"Meshes found in instance with negative scale: {names}" - ) diff --git 
a/server_addon/blender/client/ayon_blender/plugins/publish/validate_model_uv_map1.py b/server_addon/blender/client/ayon_blender/plugins/publish/validate_model_uv_map1.py deleted file mode 100644 index 74f550b6db..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/validate_model_uv_map1.py +++ /dev/null @@ -1,93 +0,0 @@ -import inspect -from typing import List - -import bpy - -from ayon_core.pipeline.publish import ( - ValidateContentsOrder, - OptionalPyblishPluginMixin, - PublishValidationError, - RepairAction -) -import ayon_blender.api.action -from ayon_blender.api import plugin - - -class ValidateModelMeshUvMap1( - plugin.BlenderInstancePlugin, - OptionalPyblishPluginMixin, -): - """Validate model mesh uvs are named `map1`. - - This is solely to get them to work nicely for the Maya pipeline. - """ - - order = ValidateContentsOrder - hosts = ["blender"] - families = ["model"] - label = "Mesh UVs named map1" - actions = [ayon_blender.api.action.SelectInvalidAction, - RepairAction] - optional = True - enabled = False - - @classmethod - def get_invalid(cls, instance) -> List: - - invalid = [] - for obj in instance: - if obj.mode != "OBJECT": - cls.log.warning( - f"Mesh object {obj.name} should be in 'OBJECT' mode" - " to be properly checked." - ) - - obj_data = obj.data - if isinstance(obj_data, bpy.types.Mesh): - mesh = obj_data - - # Ignore mesh without UVs - if not mesh.uv_layers: - continue - - # If mesh has map1 all is ok - if mesh.uv_layers.get("map1"): - continue - - cls.log.warning( - f"Mesh object {obj.name} should be in 'OBJECT' mode" - " to be properly checked." - ) - invalid.append(obj) - - return invalid - - @classmethod - def repair(cls, instance): - for obj in cls.get_invalid(instance): - mesh = obj.data - - # Rename the first UV set to map1 - mesh.uv_layers[0].name = "map1" - - def process(self, instance): - if not self.is_active(instance.data): - return - - invalid = self.get_invalid(instance) - if invalid: - raise PublishValidationError( - f"Meshes found in instance without valid UV's: {invalid}", - description=self.get_description() - ) - - def get_description(self): - return inspect.cleandoc( - """## Meshes must have map1 uv set - - To accompany a better Maya-focused pipeline with Alembics it is - expected that a Mesh has a `map1` UV set. Blender defaults to - a UV set named `UVMap` and thus needs to be renamed. - - """ - ) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/validate_no_colons_in_name.py b/server_addon/blender/client/ayon_blender/plugins/publish/validate_no_colons_in_name.py deleted file mode 100644 index dbafb53263..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/validate_no_colons_in_name.py +++ /dev/null @@ -1,53 +0,0 @@ -from typing import List - -import bpy - -import ayon_blender.api.action -from ayon_blender.api import plugin -from ayon_core.pipeline.publish import ( - ValidateContentsOrder, - OptionalPyblishPluginMixin, - PublishValidationError -) - - -class ValidateNoColonsInName( - plugin.BlenderInstancePlugin, - OptionalPyblishPluginMixin -): - """There cannot be colons in names - - Object or bone names cannot include colons. Other software do not - handle colons correctly. 
- - """ - - order = ValidateContentsOrder - hosts = ["blender"] - families = ["model", "rig"] - label = "No Colons in names" - actions = [ayon_blender.api.action.SelectInvalidAction] - - @staticmethod - def get_invalid(instance) -> List: - invalid = [] - for obj in instance: - if ':' in obj.name: - invalid.append(obj) - if isinstance(obj, bpy.types.Object) and obj.type == 'ARMATURE': - for bone in obj.data.bones: - if ':' in bone.name: - invalid.append(obj) - break - return invalid - - def process(self, instance): - if not self.is_active(instance.data): - return - - invalid = self.get_invalid(instance) - if invalid: - names = ", ".join(obj.name for obj in invalid) - raise PublishValidationError( - f"Objects found with colon in name: {names}" - ) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/validate_object_mode.py b/server_addon/blender/client/ayon_blender/plugins/publish/validate_object_mode.py deleted file mode 100644 index 4dc2c39949..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/validate_object_mode.py +++ /dev/null @@ -1,44 +0,0 @@ -from typing import List - -import bpy - -import pyblish.api -from ayon_core.pipeline.publish import ( - OptionalPyblishPluginMixin, - PublishValidationError -) -import ayon_blender.api.action -from ayon_blender.api import plugin - - -class ValidateObjectIsInObjectMode( - plugin.BlenderInstancePlugin, - OptionalPyblishPluginMixin, -): - """Validate that the objects in the instance are in Object Mode.""" - - order = pyblish.api.ValidatorOrder - 0.01 - hosts = ["blender"] - families = ["model", "rig", "layout"] - label = "Validate Object Mode" - actions = [ayon_blender.api.action.SelectInvalidAction] - optional = False - - @staticmethod - def get_invalid(instance) -> List: - invalid = [] - for obj in instance: - if isinstance(obj, bpy.types.Object) and obj.mode != "OBJECT": - invalid.append(obj) - return invalid - - def process(self, instance): - if not self.is_active(instance.data): - return - - invalid = self.get_invalid(instance) - if invalid: - names = ", ".join(obj.name for obj in invalid) - raise PublishValidationError( - f"Object found in instance is not in Object Mode: {names}" - ) diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/validate_render_camera_is_set.py b/server_addon/blender/client/ayon_blender/plugins/publish/validate_render_camera_is_set.py deleted file mode 100644 index 15eb64d2ad..0000000000 --- a/server_addon/blender/client/ayon_blender/plugins/publish/validate_render_camera_is_set.py +++ /dev/null @@ -1,29 +0,0 @@ -import bpy - -import pyblish.api - -from ayon_core.pipeline.publish import ( - OptionalPyblishPluginMixin, - PublishValidationError -) -from ayon_blender.api import plugin - - -class ValidateRenderCameraIsSet( - plugin.BlenderInstancePlugin, - OptionalPyblishPluginMixin -): - """Validate that there is a camera set as active for rendering.""" - - order = pyblish.api.ValidatorOrder - hosts = ["blender"] - families = ["render"] - label = "Validate Render Camera Is Set" - optional = False - - def process(self, instance): - if not self.is_active(instance.data): - return - - if not bpy.context.scene.camera: - raise PublishValidationError("No camera is active for rendering.") diff --git a/server_addon/blender/client/ayon_blender/plugins/publish/validate_transform_zero.py b/server_addon/blender/client/ayon_blender/plugins/publish/validate_transform_zero.py deleted file mode 100644 index c7bfc6e8a6..0000000000 --- 
a/server_addon/blender/client/ayon_blender/plugins/publish/validate_transform_zero.py +++ /dev/null @@ -1,94 +0,0 @@ -import inspect -from typing import List - -import mathutils -import bpy - -from ayon_blender.api import plugin, lib -import ayon_blender.api.action -from ayon_core.pipeline.publish import ( - ValidateContentsOrder, - OptionalPyblishPluginMixin, - PublishValidationError, - RepairAction -) - - -class ValidateTransformZero( - plugin.BlenderInstancePlugin, - OptionalPyblishPluginMixin -): - """Transforms can't have any values""" - - order = ValidateContentsOrder - hosts = ["blender"] - families = ["model"] - label = "Transform Zero" - actions = [ayon_blender.api.action.SelectInvalidAction, - RepairAction] - - _identity = mathutils.Matrix() - - @classmethod - def get_invalid(cls, instance) -> List: - invalid = [] - for obj in instance: - if ( - isinstance(obj, bpy.types.Object) - and obj.matrix_basis != cls._identity - ): - invalid.append(obj) - return invalid - - def process(self, instance): - if not self.is_active(instance.data): - return - - invalid = self.get_invalid(instance) - if invalid: - names = ", ".join(obj.name for obj in invalid) - raise PublishValidationError( - "Objects found in instance which do not" - f" have transform set to zero: {names}", - description=self.get_description() - ) - - @classmethod - def repair(cls, instance): - - invalid = cls.get_invalid(instance) - if not invalid: - return - - context = plugin.create_blender_context( - active=invalid[0], selected=invalid - ) - with lib.maintained_selection(): - with bpy.context.temp_override(**context): - plugin.deselect_all() - for obj in invalid: - obj.select_set(True) - - # TODO: Preferably this does allow custom pivot point locations - # and if so, this should likely apply to the delta instead - # using `bpy.ops.object.transforms_to_deltas(mode="ALL")` - bpy.ops.object.transform_apply(location=True, - rotation=True, - scale=True) - - def get_description(self): - return inspect.cleandoc( - """## Transforms can't have any values. - - The location, rotation and scale on the transform must be at - the default values. This also goes for the delta transforms. - - To solve this issue, try freezing the transforms: - - `Object` > `Apply` > `All Transforms` - - Using the Repair action directly will do the same. - - So long as the transforms, rotation and scale values are zero, - you're all good. 
- """ - ) diff --git a/server_addon/blender/client/ayon_blender/version.py b/server_addon/blender/client/ayon_blender/version.py deleted file mode 100644 index c21b06a8de..0000000000 --- a/server_addon/blender/client/ayon_blender/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring AYON addon 'blender' version.""" -__version__ = "0.2.1" diff --git a/server_addon/blender/package.py b/server_addon/blender/package.py deleted file mode 100644 index 1b595e22da..0000000000 --- a/server_addon/blender/package.py +++ /dev/null @@ -1,11 +0,0 @@ -name = "blender" -title = "Blender" -version = "0.2.1" - -client_dir = "ayon_blender" - -ayon_required_addons = { - "core": ">0.3.2", -} -ayon_compatible_addons = {} - diff --git a/server_addon/blender/server/__init__.py b/server_addon/blender/server/__init__.py deleted file mode 100644 index b274e3bc29..0000000000 --- a/server_addon/blender/server/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Type - -from ayon_server.addons import BaseServerAddon - -from .settings import BlenderSettings, DEFAULT_VALUES - - -class BlenderAddon(BaseServerAddon): - settings_model: Type[BlenderSettings] = BlenderSettings - - async def get_default_settings(self): - settings_model_cls = self.get_settings_model() - return settings_model_cls(**DEFAULT_VALUES) diff --git a/server_addon/blender/server/settings/__init__.py b/server_addon/blender/server/settings/__init__.py deleted file mode 100644 index 3d51e5c3e1..0000000000 --- a/server_addon/blender/server/settings/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from .main import ( - BlenderSettings, - DEFAULT_VALUES, -) - - -__all__ = ( - "BlenderSettings", - "DEFAULT_VALUES", -) diff --git a/server_addon/blender/server/settings/imageio.py b/server_addon/blender/server/settings/imageio.py deleted file mode 100644 index 06eec09e7b..0000000000 --- a/server_addon/blender/server/settings/imageio.py +++ /dev/null @@ -1,63 +0,0 @@ -from pydantic import validator -from ayon_server.settings import BaseSettingsModel, SettingsField -from ayon_server.settings.validators import ensure_unique_names - - -class ImageIOConfigModel(BaseSettingsModel): - """[DEPRECATED] Addon OCIO config settings. Please set the OCIO config - path in the Core addon profiles here - (ayon+settings://core/imageio/ocio_config_profiles). - """ - - override_global_config: bool = SettingsField( - False, - title="Override global OCIO config", - description=( - "DEPRECATED functionality. Please set the OCIO config path in the " - "Core addon profiles here (ayon+settings://core/imageio/" - "ocio_config_profiles)." - ), - ) - filepath: list[str] = SettingsField( - default_factory=list, - title="Config path", - description=( - "DEPRECATED functionality. Please set the OCIO config path in the " - "Core addon profiles here (ayon+settings://core/imageio/" - "ocio_config_profiles)." 
- ), - ) - - -class ImageIOFileRuleModel(BaseSettingsModel): - name: str = SettingsField("", title="Rule name") - pattern: str = SettingsField("", title="Regex pattern") - colorspace: str = SettingsField("", title="Colorspace name") - ext: str = SettingsField("", title="File extension") - - -class ImageIOFileRulesModel(BaseSettingsModel): - activate_host_rules: bool = SettingsField(False) - rules: list[ImageIOFileRuleModel] = SettingsField( - default_factory=list, - title="Rules" - ) - - @validator("rules") - def validate_unique_outputs(cls, value): - ensure_unique_names(value) - return value - - -class BlenderImageIOModel(BaseSettingsModel): - activate_host_color_management: bool = SettingsField( - True, title="Enable Color Management" - ) - ocio_config: ImageIOConfigModel = SettingsField( - default_factory=ImageIOConfigModel, - title="OCIO config" - ) - file_rules: ImageIOFileRulesModel = SettingsField( - default_factory=ImageIOFileRulesModel, - title="File Rules" - ) diff --git a/server_addon/blender/server/settings/main.py b/server_addon/blender/server/settings/main.py deleted file mode 100644 index 3cca22ae3b..0000000000 --- a/server_addon/blender/server/settings/main.py +++ /dev/null @@ -1,70 +0,0 @@ -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - TemplateWorkfileBaseOptions, -) - -from .imageio import BlenderImageIOModel -from .publish_plugins import ( - PublishPluginsModel, - DEFAULT_BLENDER_PUBLISH_SETTINGS -) -from .render_settings import ( - RenderSettingsModel, - DEFAULT_RENDER_SETTINGS -) - - -class UnitScaleSettingsModel(BaseSettingsModel): - enabled: bool = SettingsField(True, title="Enabled") - apply_on_opening: bool = SettingsField( - False, title="Apply on Opening Existing Files") - base_file_unit_scale: float = SettingsField( - 1.0, title="Base File Unit Scale" - ) - - -class BlenderSettings(BaseSettingsModel): - unit_scale_settings: UnitScaleSettingsModel = SettingsField( - default_factory=UnitScaleSettingsModel, - title="Set Unit Scale" - ) - set_resolution_startup: bool = SettingsField( - True, - title="Set Resolution on Startup" - ) - set_frames_startup: bool = SettingsField( - True, - title="Set Start/End Frames and FPS on Startup" - ) - imageio: BlenderImageIOModel = SettingsField( - default_factory=BlenderImageIOModel, - title="Color Management (ImageIO)" - ) - RenderSettings: RenderSettingsModel = SettingsField( - default_factory=RenderSettingsModel, title="Render Settings") - workfile_builder: TemplateWorkfileBaseOptions = SettingsField( - default_factory=TemplateWorkfileBaseOptions, - title="Workfile Builder" - ) - publish: PublishPluginsModel = SettingsField( - default_factory=PublishPluginsModel, - title="Publish Plugins" - ) - - -DEFAULT_VALUES = { - "unit_scale_settings": { - "enabled": True, - "apply_on_opening": False, - "base_file_unit_scale": 1.00 - }, - "set_frames_startup": True, - "set_resolution_startup": True, - "RenderSettings": DEFAULT_RENDER_SETTINGS, - "publish": DEFAULT_BLENDER_PUBLISH_SETTINGS, - "workfile_builder": { - "create_first_version": False, - "custom_templates": [] - } -} diff --git a/server_addon/blender/server/settings/publish_plugins.py b/server_addon/blender/server/settings/publish_plugins.py deleted file mode 100644 index 8db8c5be46..0000000000 --- a/server_addon/blender/server/settings/publish_plugins.py +++ /dev/null @@ -1,361 +0,0 @@ -import json -from pydantic import validator -from ayon_server.exceptions import BadRequestException -from ayon_server.settings import BaseSettingsModel, 
SettingsField
-
-
-def validate_json_dict(value):
-    if not value.strip():
-        return "{}"
-    try:
-        converted_value = json.loads(value)
-        success = isinstance(converted_value, dict)
-    except json.JSONDecodeError:
-        success = False
-
-    if not success:
-        raise BadRequestException(
-            "Environment's can't be parsed as json object"
-        )
-    return value
-
-
-class ValidatePluginModel(BaseSettingsModel):
-    enabled: bool = SettingsField(True)
-    optional: bool = SettingsField(title="Optional")
-    active: bool = SettingsField(title="Active")
-
-
-class ValidateFileSavedModel(BaseSettingsModel):
-    enabled: bool = SettingsField(title="ValidateFileSaved")
-    optional: bool = SettingsField(title="Optional")
-    active: bool = SettingsField(title="Active")
-    exclude_families: list[str] = SettingsField(
-        default_factory=list,
-        title="Exclude product types"
-    )
-
-
-class ExtractBlendModel(BaseSettingsModel):
-    enabled: bool = SettingsField(True)
-    optional: bool = SettingsField(title="Optional")
-    active: bool = SettingsField(title="Active")
-    families: list[str] = SettingsField(
-        default_factory=list,
-        title="Families"
-    )
-    compress: bool = SettingsField(True, title="Compress")
-
-
-class ExtractBlendAnimationModel(BaseSettingsModel):
-    enabled: bool = SettingsField(True)
-    optional: bool = SettingsField(title="Optional")
-    active: bool = SettingsField(title="Active")
-    compress: bool = SettingsField(False, title="Compress")
-
-
-class ExtractPlayblastModel(BaseSettingsModel):
-    enabled: bool = SettingsField(True)
-    optional: bool = SettingsField(title="Optional")
-    active: bool = SettingsField(title="Active")
-    presets: str = SettingsField("", title="Presets", widget="textarea")
-    compress: bool = SettingsField(False, title="Compress")
-
-    @validator("presets")
-    def validate_json(cls, value):
-        return validate_json_dict(value)
-
-
-class PublishPluginsModel(BaseSettingsModel):
-    ValidateCameraZeroKeyframe: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Validate Camera Zero Keyframe",
-        section="General Validators"
-    )
-    ValidateFileSaved: ValidateFileSavedModel = SettingsField(
-        default_factory=ValidateFileSavedModel,
-        title="Validate File Saved",
-    )
-    ValidateInstanceEmpty: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Validate Instance is not Empty"
-    )
-    ValidateMeshHasUvs: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Validate Mesh Has Uvs",
-        section="Model Validators"
-    )
-    ValidateMeshNoNegativeScale: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Validate Mesh No Negative Scale"
-    )
-    ValidateModelMeshUvMap1: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Validate Model Mesh Has UV map named map1"
-    )
-    ValidateTransformZero: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Validate Transform Zero"
-    )
-    ValidateNoColonsInName: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Validate No Colons In Name"
-    )
-    ValidateRenderCameraIsSet: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Validate Render Camera Is Set",
-        section="Render Validators"
-    )
-    ValidateDeadlinePublish: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Validate Render Output for Deadline",
-    )
-    ExtractBlend: ExtractBlendModel = SettingsField(
-        default_factory=ExtractBlendModel,
-        title="Extract Blend",
-        section="Extractors"
-    )
-    ExtractFBX: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Extract FBX"
-    )
-    ExtractModelABC: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Extract ABC"
-    )
-    ExtractBlendAnimation: ExtractBlendAnimationModel = SettingsField(
-        default_factory=ExtractBlendAnimationModel,
-        title="Extract Blend Animation"
-    )
-    ExtractAnimationFBX: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Extract Animation FBX"
-    )
-    ExtractCamera: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Extract Camera"
-    )
-    ExtractCameraABC: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Extract Camera as ABC"
-    )
-    ExtractLayout: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Extract Layout (JSON)"
-    )
-    ExtractThumbnail: ExtractPlayblastModel = SettingsField(
-        default_factory=ExtractPlayblastModel,
-        title="Extract Thumbnail"
-    )
-    ExtractPlayblast: ExtractPlayblastModel = SettingsField(
-        default_factory=ExtractPlayblastModel,
-        title="Extract Playblast"
-    )
-    ExtractModelUSD: ValidatePluginModel = SettingsField(
-        default_factory=ValidatePluginModel,
-        title="Extract Model USD"
-    )
-
-
-DEFAULT_BLENDER_PUBLISH_SETTINGS = {
-    "ValidateCameraZeroKeyframe": {
-        "enabled": False,
-        "optional": True,
-        "active": True
-    },
-    "ValidateFileSaved": {
-        "enabled": True,
-        "optional": False,
-        "active": True,
-        "exclude_families": []
-    },
-    "ValidateRenderCameraIsSet": {
-        "enabled": True,
-        "optional": False,
-        "active": True
-    },
-    "ValidateDeadlinePublish": {
-        "enabled": True,
-        "optional": False,
-        "active": True
-    },
-    "ValidateMeshHasUvs": {
-        "enabled": True,
-        "optional": True,
-        "active": True
-    },
-    "ValidateMeshNoNegativeScale": {
-        "enabled": True,
-        "optional": False,
-        "active": True
-    },
-    "ValidateModelMeshUvMap1": {
-        "enabled": False,
-        "optional": True,
-        "active": True
-    },
-    "ValidateTransformZero": {
-        "enabled": False,
-        "optional": True,
-        "active": True
-    },
-    "ValidateNoColonsInName": {
-        "enabled": False,
-        "optional": True,
-        "active": True
-    },
-    "ValidateInstanceEmpty": {
-        "enabled": True,
-        "optional": False,
-        "active": True
-    },
-    "ExtractBlend": {
-        "enabled": True,
-        "optional": True,
-        "active": True,
-        "families": [
-            "model",
-            "camera",
-            "rig",
-            "action",
-            "layout",
-            "blendScene"
-        ],
-        "compress": False
-    },
-    "ExtractFBX": {
-        "enabled": False,
-        "optional": True,
-        "active": True
-    },
-    "ExtractModelABC": {
-        "enabled": True,
-        "optional": True,
-        "active": True
-    },
-    "ExtractBlendAnimation": {
-        "enabled": True,
-        "optional": True,
-        "active": True,
-        "compress": False
-    },
-    "ExtractAnimationFBX": {
-        "enabled": False,
-        "optional": True,
-        "active": True
-    },
-    "ExtractCamera": {
-        "enabled": True,
-        "optional": True,
-        "active": True
-    },
-    "ExtractCameraABC": {
-        "enabled": True,
-        "optional": True,
-        "active": True
-    },
-    "ExtractLayout": {
-        "enabled": True,
-        "optional": True,
-        "active": False
-    },
-    "ExtractThumbnail": {
-        "enabled": True,
-        "optional": True,
-        "active": True,
-        "presets": json.dumps(
-            {
-                "model": {
-                    "image_settings": {
-                        "file_format": "JPEG",
-                        "color_mode": "RGB",
-                        "quality": 100
-                    },
-                    "display_options": {
-                        "shading": {
-                            "light": "STUDIO",
-                            "studio_light": "Default",
-                            "type": "SOLID",
-                            "color_type": "OBJECT",
-                            "show_xray": False,
-                            "show_shadows": False,
-                            "show_cavity": True
-                        },
-                        "overlay": {
-                            "show_overlays": False
-                        }
-                    }
-                },
-                "rig": {
-                    "image_settings": {
-                        "file_format": "JPEG",
-                        "color_mode": "RGB",
-                        "quality": 100
-                    },
-                    "display_options": {
-                        "shading": {
-                            "light": "STUDIO",
-                            "studio_light": "Default",
-                            "type": "SOLID",
-                            "color_type": "OBJECT",
-                            "show_xray": True,
-                            "show_shadows": False,
-                            "show_cavity": False
-                        },
-                        "overlay": {
-                            "show_overlays": True,
-                            "show_ortho_grid": False,
-                            "show_floor": False,
-                            "show_axis_x": False,
-                            "show_axis_y": False,
-                            "show_axis_z": False,
-                            "show_text": False,
-                            "show_stats": False,
-                            "show_cursor": False,
-                            "show_annotation": False,
-                            "show_extras": False,
-                            "show_relationship_lines": False,
-                            "show_outline_selected": False,
-                            "show_motion_paths": False,
-                            "show_object_origins": False,
-                            "show_bones": True
-                        }
-                    }
-                }
-            },
-            indent=4,
-        )
-    },
-    "ExtractPlayblast": {
-        "enabled": True,
-        "optional": True,
-        "active": True,
-        "presets": json.dumps(
-            {
-                "default": {
-                    "image_settings": {
-                        "file_format": "PNG",
-                        "color_mode": "RGB",
-                        "color_depth": "8",
-                        "compression": 15
-                    },
-                    "display_options": {
-                        "shading": {
-                            "type": "MATERIAL",
-                            "render_pass": "COMBINED"
-                        },
-                        "overlay": {
-                            "show_overlays": False
-                        }
-                    }
-                }
-            },
-            indent=4
-        )
-    },
-    "ExtractModelUSD": {
-        "enabled": True,
-        "optional": True,
-        "active": True
-    }
-}
diff --git a/server_addon/blender/server/settings/render_settings.py b/server_addon/blender/server/settings/render_settings.py
deleted file mode 100644
index f992ea6fcc..0000000000
--- a/server_addon/blender/server/settings/render_settings.py
+++ /dev/null
@@ -1,158 +0,0 @@
-"""Providing models and values for Blender Render Settings."""
-from ayon_server.settings import BaseSettingsModel, SettingsField
-
-
-def aov_separators_enum():
-    return [
-        {"value": "dash", "label": "- (dash)"},
-        {"value": "underscore", "label": "_ (underscore)"},
-        {"value": "dot", "label": ". (dot)"}
-    ]
-
-
-def image_format_enum():
-    return [
-        {"value": "exr", "label": "OpenEXR"},
-        {"value": "bmp", "label": "BMP"},
-        {"value": "rgb", "label": "Iris"},
-        {"value": "png", "label": "PNG"},
-        {"value": "jpg", "label": "JPEG"},
-        {"value": "jp2", "label": "JPEG 2000"},
-        {"value": "tga", "label": "Targa"},
-        {"value": "tif", "label": "TIFF"},
-    ]
-
-
-def renderers_enum():
-    return [
-        {"value": "CYCLES", "label": "Cycles"},
-        {"value": "BLENDER_EEVEE", "label": "Eevee"},
-    ]
-
-
-def aov_list_enum():
-    return [
-        {"value": "empty", "label": "< none >"},
-        {"value": "combined", "label": "Combined"},
-        {"value": "z", "label": "Z"},
-        {"value": "mist", "label": "Mist"},
-        {"value": "normal", "label": "Normal"},
-        {"value": "position", "label": "Position (Cycles Only)"},
-        {"value": "vector", "label": "Vector (Cycles Only)"},
-        {"value": "uv", "label": "UV (Cycles Only)"},
-        {"value": "denoising", "label": "Denoising Data (Cycles Only)"},
-        {"value": "object_index", "label": "Object Index (Cycles Only)"},
-        {"value": "material_index", "label": "Material Index (Cycles Only)"},
-        {"value": "sample_count", "label": "Sample Count (Cycles Only)"},
-        {"value": "diffuse_light", "label": "Diffuse Light/Direct"},
-        {
-            "value": "diffuse_indirect",
-            "label": "Diffuse Indirect (Cycles Only)"
-        },
-        {"value": "diffuse_color", "label": "Diffuse Color"},
-        {"value": "specular_light", "label": "Specular (Glossy) Light/Direct"},
-        {
-            "value": "specular_indirect",
-            "label": "Specular (Glossy) Indirect (Cycles Only)"
-        },
-        {"value": "specular_color", "label": "Specular (Glossy) Color"},
-        {
-            "value": "transmission_light",
-            "label": "Transmission Light/Direct (Cycles Only)"
-        },
-        {
-            "value": "transmission_indirect",
-            "label": "Transmission Indirect (Cycles Only)"
-        },
-        {
-            "value": "transmission_color",
-            "label": "Transmission Color (Cycles Only)"
-        },
-        {"value": "volume_light", "label": "Volume Light/Direct"},
-        {"value": "volume_indirect", "label": "Volume Indirect (Cycles Only)"},
-        {"value": "emission", "label": "Emission"},
-        {"value": "environment", "label": "Environment"},
-        {"value": "shadow", "label": "Shadow/Shadow Catcher"},
-        {"value": "ao", "label": "Ambient Occlusion"},
-        {"value": "bloom", "label": "Bloom (Eevee Only)"},
-        {"value": "transparent", "label": "Transparent (Eevee Only)"},
-        {"value": "cryptomatte_object", "label": "Cryptomatte Object"},
-        {"value": "cryptomatte_material", "label": "Cryptomatte Material"},
-        {"value": "cryptomatte_asset", "label": "Cryptomatte Asset"},
-        {
-            "value": "cryptomatte_accurate",
-            "label": "Cryptomatte Accurate Mode (Eevee Only)"
-        },
-    ]
-
-
-def custom_passes_types_enum():
-    return [
-        {"value": "COLOR", "label": "Color"},
-        {"value": "VALUE", "label": "Value"},
-    ]
-
-
-class CustomPassesModel(BaseSettingsModel):
-    """Custom Passes"""
-    _layout = "compact"
-
-    attribute: str = SettingsField("", title="Attribute name")
-    value: str = SettingsField(
-        "COLOR",
-        title="Type",
-        enum_resolver=custom_passes_types_enum
-    )
-
-
-class RenderSettingsModel(BaseSettingsModel):
-    default_render_image_folder: str = SettingsField(
-        title="Default Render Image Folder"
-    )
-    aov_separator: str = SettingsField(
-        "underscore",
-        title="AOV Separator Character",
-        enum_resolver=aov_separators_enum
-    )
-    image_format: str = SettingsField(
-        "exr",
-        title="Image Format",
-        enum_resolver=image_format_enum
-    )
-    multilayer_exr: bool = SettingsField(
-        title="Multilayer (EXR)"
-    )
-    renderer: str = SettingsField(
-        "CYCLES",
-        title="Renderer",
-        enum_resolver=renderers_enum
-    )
-    compositing: bool = SettingsField(
-        title="Enable Compositing"
-    )
-    aov_list: list[str] = SettingsField(
-        default_factory=list,
-        enum_resolver=aov_list_enum,
-        title="AOVs to create"
-    )
-    custom_passes: list[CustomPassesModel] = SettingsField(
-        default_factory=list,
-        title="Custom Passes",
-        description=(
-            "Add custom AOVs. They are added to the view layer and in the "
-            "Compositing Nodetree,\nbut they need to be added manually to "
-            "the Shader Nodetree."
-        )
-    )
-
-
-DEFAULT_RENDER_SETTINGS = {
-    "default_render_image_folder": "renders/blender",
-    "aov_separator": "underscore",
-    "image_format": "exr",
-    "multilayer_exr": True,
-    "renderer": "CYCLES",
-    "compositing": True,
-    "aov_list": ["combined"],
-    "custom_passes": []
-}