diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 655ffe289e..5a0ed1ae1d 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -1,6 +1,6 @@ name: Bug Report description: File a bug report -title: 'Bug: ' +title: '' labels: - 'type: bug' body: diff --git a/.github/ISSUE_TEMPLATE/enhancement_request.yml b/.github/ISSUE_TEMPLATE/enhancement_request.yml index 52b49e0481..da4d0d9319 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_request.yml +++ b/.github/ISSUE_TEMPLATE/enhancement_request.yml @@ -1,6 +1,6 @@ name: Enhancement Request description: Create a report to help us enhance a particular feature -title: "Enhancement: " +title: "" labels: - "type: enhancement" body: diff --git a/.github/pr-glob-labeler.yml b/.github/pr-glob-labeler.yml new file mode 100644 index 0000000000..286e7768b5 --- /dev/null +++ b/.github/pr-glob-labeler.yml @@ -0,0 +1,102 @@ +# Add type: unittest label if any changes in tests folders +'type: unittest': +- '*/*tests*/**/*' + +# any changes in documentation structure +'type: documentation': +- '*/**/*website*/**/*' +- '*/**/*docs*/**/*' + +# hosts triage +'host: Nuke': +- '*/**/*nuke*' +- '*/**/*nuke*/**/*' + +'host: Photoshop': +- '*/**/*photoshop*' +- '*/**/*photoshop*/**/*' + +'host: Harmony': +- '*/**/*harmony*' +- '*/**/*harmony*/**/*' + +'host: UE': +- '*/**/*unreal*' +- '*/**/*unreal*/**/*' + +'host: Houdini': +- '*/**/*houdini*' +- '*/**/*houdini*/**/*' + +'host: Maya': +- '*/**/*maya*' +- '*/**/*maya*/**/*' + +'host: Resolve': +- '*/**/*resolve*' +- '*/**/*resolve*/**/*' + +'host: Blender': +- '*/**/*blender*' +- '*/**/*blender*/**/*' + +'host: Hiero': +- '*/**/*hiero*' +- '*/**/*hiero*/**/*' + +'host: Fusion': +- '*/**/*fusion*' +- '*/**/*fusion*/**/*' + +'host: Flame': +- '*/**/*flame*' +- '*/**/*flame*/**/*' + +'host: TrayPublisher': +- '*/**/*traypublisher*' +- '*/**/*traypublisher*/**/*' + +'host: 3dsmax': +- '*/**/*max*' +- '*/**/*max*/**/*' + +'host: TV Paint': +- '*/**/*tvpaint*' +- '*/**/*tvpaint*/**/*' + +'host: CelAction': +- '*/**/*celaction*' +- '*/**/*celaction*/**/*' + +'host: After Effects': +- '*/**/*aftereffects*' +- '*/**/*aftereffects*/**/*' + +'host: Substance Painter': +- '*/**/*substancepainter*' +- '*/**/*substancepainter*/**/*' + +# modules triage +'module: Deadline': +- '*/**/*deadline*' +- '*/**/*deadline*/**/*' + +'module: RoyalRender': +- '*/**/*royalrender*' +- '*/**/*royalrender*/**/*' + +'module: Sitesync': +- '*/**/*sync_server*' +- '*/**/*sync_server*/**/*' + +'module: Ftrack': +- '*/**/*ftrack*' +- '*/**/*ftrack*/**/*' + +'module: Shotgrid': +- '*/**/*shotgrid*' +- '*/**/*shotgrid*/**/*' + +'module: Kitsu': +- '*/**/*kitsu*' +- '*/**/*kitsu*/**/*' diff --git a/.github/workflows/issue_to_clickup_trigger.yml b/.github/workflows/issue_to_clickup_trigger.yml new file mode 100644 index 0000000000..d3c026b4a5 --- /dev/null +++ b/.github/workflows/issue_to_clickup_trigger.yml @@ -0,0 +1,28 @@ +name: Sync Issues to ClickUp [trigger] + +on: + workflow_dispatch: + inputs: + issue-number: + required: true + issues: + types: [labeled] + + +jobs: + call-ci-tools-issue-sync: + if: github.event.inputs.issue-number != '' || github.event_name == 'issues' && contains(github.event.issue.labels.*.name, 'backlog') + uses: ynput/ci-tools/.github/workflows/issue_to_clickup_ref.yml@main + with: + # issue number should be taken either from inputs or from the event + issue-number: ${{ github.event.inputs.issue-number || github.event.issue.number }} + 
repo-owner: ${{ github.event.repository.owner.login }} + repo-name: ${{ github.event.repository.name }} + secrets: + token: ${{ secrets.YNPUT_BOT_TOKEN }} + cu_api_key: ${{ secrets.CLICKUP_API_KEY }} + cu_team_id: ${{ secrets.CLICKUP_TEAM_ID }} + cu_folder_id: ${{ secrets.CLICKUP_FOLDER_ID }} + cu_list_id: ${{ secrets.CLICKUP_LIST_ID }} + cu_field_domain_id: ${{ secrets.CLICKUP_DOMAIN_FIELD_ID }} + cu_field_type_id: ${{ secrets.CLICKUP_ISSUETYPE_FIELD_ID }} diff --git a/client/ayon_core/__init__.py b/client/ayon_core/__init__.py index c9c0dfc614..5f9eb6cea3 100644 --- a/client/ayon_core/__init__.py +++ b/client/ayon_core/__init__.py @@ -7,3 +7,6 @@ AYON_CORE_ROOT = os.path.dirname(os.path.abspath(__file__)) PACKAGE_DIR = AYON_CORE_ROOT PLUGINS_DIR = os.path.join(AYON_CORE_ROOT, "plugins") AYON_SERVER_ENABLED = True + +# Indicate if AYON entities should be used instead of OpenPype entities +USE_AYON_ENTITIES = False diff --git a/client/ayon_core/addon/README.md b/client/ayon_core/addon/README.md index b793b0ffb4..a15e8bdc69 100644 --- a/client/ayon_core/addon/README.md +++ b/client/ayon_core/addon/README.md @@ -31,7 +31,7 @@ AYON addons should contain separated logic of specific kind of implementation, s - addon must implement `get_plugin_paths` which must return dictionary with possible keys `"publish"`, `"load"`, `"create"` or `"actions"` - each key may contain list or string with a path to directory with plugins -## ITrayModule +## ITrayAddon - addon has more logic when used in a tray - it is possible that addon can be used only in the tray - abstract methods @@ -46,7 +46,7 @@ AYON addons should contain separated logic of specific kind of implementation, s - if addon has logic only in tray or for both then should be checking for `tray_initialized` attribute to decide how should handle situations ### ITrayService -- inherits from `ITrayModule` and implements `tray_menu` method for you +- inherits from `ITrayAddon` and implements `tray_menu` method for you - adds action to submenu "Services" in tray widget menu with icon and label - abstract attribute `label` - label shown in menu @@ -57,7 +57,7 @@ AYON addons should contain separated logic of specific kind of implementation, s - these states must be set by addon itself `set_service_running` is default state on initialization ### ITrayAction -- inherits from `ITrayModule` and implements `tray_menu` method for you +- inherits from `ITrayAddon` and implements `tray_menu` method for you - adds action to tray widget menu with label - abstract attribute `label` - label shown in menu @@ -89,4 +89,4 @@ AYON addons should contain separated logic of specific kind of implementation, s ### TrayAddonsManager - inherits from `AddonsManager` -- has specific implementation for Pype Tray tool and handle `ITrayModule` methods +- has specific implementation for Pype Tray tool and handle `ITrayAddon` methods diff --git a/client/ayon_core/addon/base.py b/client/ayon_core/addon/base.py index a3920c4acb..f0763649ca 100644 --- a/client/ayon_core/addon/base.py +++ b/client/ayon_core/addon/base.py @@ -15,13 +15,9 @@ from abc import ABCMeta, abstractmethod import six import appdirs -from ayon_core.lib import Logger +from ayon_core.lib import Logger, is_dev_mode_enabled from ayon_core.client import get_ayon_server_api_connection -from ayon_core.settings import get_system_settings -from ayon_core.settings.ayon_settings import ( - is_dev_mode_enabled, - get_ayon_settings, -) +from ayon_core.settings import get_studio_settings from .interfaces import ( IPluginPaths, @@ 
-648,7 +644,6 @@ class AddonsManager: def __init__(self, settings=None, initialize=True): self._settings = settings - self._system_settings = None self._addons = [] self._addons_by_id = {} @@ -738,14 +733,9 @@ class AddonsManager: # Prepare settings for addons settings = self._settings if settings is None: - settings = get_ayon_settings() + settings = get_studio_settings() - # OpenPype settings - system_settings = self._system_settings - if system_settings is None: - system_settings = get_system_settings() - - modules_settings = system_settings["modules"] + modules_settings = {} report = {} time_start = time.time() @@ -788,6 +778,7 @@ class AddonsManager: addon_classes.append(modules_item) + aliased_names = [] for addon_cls in addon_classes: name = addon_cls.__name__ if issubclass(addon_cls, OpenPypeModule): @@ -807,6 +798,13 @@ class AddonsManager: self._addons.append(addon) self._addons_by_id[addon.id] = addon self._addons_by_name[addon.name] = addon + # NOTE This will be removed with release 1.0.0 of ayon-core + # please use carefully. + # Gives option to use alias name for addon for cases when + # name in OpenPype was not the same as in AYON. + name_alias = getattr(addon, "openpype_alias", None) + if name_alias: + aliased_names.append((name_alias, addon)) enabled_str = "X" if not addon.enabled: enabled_str = " " @@ -822,6 +820,17 @@ class AddonsManager: exc_info=True ) + for item in aliased_names: + name_alias, addon = item + if name_alias not in self._addons_by_name: + self._addons_by_name[name_alias] = addon + continue + self.log.warning( + "Alias name '{}' of addon '{}' is already assigned.".format( + name_alias, addon.name + ) + ) + if self._report is not None: report[self._report_total_key] = time.time() - time_start self._report["Initialization"] = report diff --git a/client/ayon_core/addon/click_wrap.py b/client/ayon_core/addon/click_wrap.py index d49188312d..911a5a5707 100644 --- a/client/ayon_core/addon/click_wrap.py +++ b/client/ayon_core/addon/click_wrap.py @@ -15,7 +15,7 @@ method to convert 'click_wrap' object to 'click' object. 
Before ```python import click -from ayon_core.modules import AYONAddon +from ayon_core.addon import AYONAddon class ExampleAddon(AYONAddon): @@ -40,7 +40,7 @@ def mycommand(arg1, arg2): Now ``` from ayon_core import click_wrap -from ayon_core.modules import AYONAddon +from ayon_core.addon import AYONAddon class ExampleAddon(AYONAddon): @@ -72,7 +72,7 @@ Added small enhancements: Example: ```python from ayon_core import click_wrap - from ayon_core.modules import AYONAddon + from ayon_core.addon import AYONAddon class ExampleAddon(AYONAddon): diff --git a/client/ayon_core/cli_commands.py b/client/ayon_core/cli_commands.py index c07b72afdf..a24710aef2 100644 --- a/client/ayon_core/cli_commands.py +++ b/client/ayon_core/cli_commands.py @@ -73,6 +73,20 @@ class Commands: import pyblish.api import pyblish.util + # Fix older jobs + for src_key, dst_key in ( + ("AVALON_PROJECT", "AYON_PROJECT_NAME"), + ("AVALON_ASSET", "AYON_FOLDER_PATH"), + ("AVALON_TASK", "AYON_TASK_NAME"), + ("AVALON_WORKDIR", "AYON_WORKDIR"), + ("AVALON_APP_NAME", "AYON_APP_NAME"), + ("AVALON_APP", "AYON_HOST_NAME"), + ): + if src_key in os.environ and dst_key not in os.environ: + os.environ[dst_key] = os.environ[src_key] + # Remove old keys, so we're sure they're not used + os.environ.pop(src_key, None) + log = Logger.get_logger("CLI-publish") install_ayon_plugins() @@ -87,12 +101,12 @@ class Commands: if not any(paths): raise RuntimeError("No publish paths specified") - app_full_name = os.getenv("AVALON_APP_NAME") + app_full_name = os.getenv("AYON_APP_NAME") if app_full_name: context = get_global_context() env = get_app_environments_for_context( context["project_name"], - context["asset_name"], + context["folder_path"], context["task_name"], app_full_name, launch_type=LaunchTypes.farm_publish, diff --git a/client/ayon_core/hooks/pre_add_last_workfile_arg.py b/client/ayon_core/hooks/pre_add_last_workfile_arg.py index 8144afd401..d11bb106d6 100644 --- a/client/ayon_core/hooks/pre_add_last_workfile_arg.py +++ b/client/ayon_core/hooks/pre_add_last_workfile_arg.py @@ -27,7 +27,8 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook): "tvpaint", "substancepainter", "aftereffects", - "wrap" + "wrap", + "openrv" } launch_types = {LaunchTypes.local} diff --git a/client/ayon_core/hooks/pre_copy_template_workfile.py b/client/ayon_core/hooks/pre_copy_template_workfile.py index e5d2d4f640..df2a0386b2 100644 --- a/client/ayon_core/hooks/pre_copy_template_workfile.py +++ b/client/ayon_core/hooks/pre_copy_template_workfile.py @@ -54,7 +54,7 @@ class CopyTemplateWorkfile(PreLaunchHook): self.log.info("Last workfile does not exist.") project_name = self.data["project_name"] - asset_name = self.data["asset_name"] + asset_name = self.data["folder_path"] task_name = self.data["task_name"] host_name = self.application.host_name diff --git a/client/ayon_core/hooks/pre_create_extra_workdir_folders.py b/client/ayon_core/hooks/pre_create_extra_workdir_folders.py index 6116d5fbd3..72c6bf2f68 100644 --- a/client/ayon_core/hooks/pre_create_extra_workdir_folders.py +++ b/client/ayon_core/hooks/pre_create_extra_workdir_folders.py @@ -21,7 +21,7 @@ class CreateWorkdirExtraFolders(PreLaunchHook): return env = self.data.get("env") or {} - workdir = env.get("AVALON_WORKDIR") + workdir = env.get("AYON_WORKDIR") if not workdir or not os.path.exists(workdir): return diff --git a/client/ayon_core/hooks/pre_global_host_data.py b/client/ayon_core/hooks/pre_global_host_data.py index 3422c87484..de6d4acc8b 100644 --- a/client/ayon_core/hooks/pre_global_host_data.py +++ 
b/client/ayon_core/hooks/pre_global_host_data.py @@ -22,7 +22,7 @@ class GlobalHostDataHook(PreLaunchHook): app = self.launch_context.application temp_data = EnvironmentPrepData({ "project_name": self.data["project_name"], - "asset_name": self.data["asset_name"], + "folder_path": self.data["folder_path"], "task_name": self.data["task_name"], "app": app, @@ -66,7 +66,7 @@ class GlobalHostDataHook(PreLaunchHook): project_doc = get_project(project_name) self.data["project_doc"] = project_doc - asset_name = self.data.get("asset_name") + asset_name = self.data.get("folder_path") if not asset_name: self.log.warning( "Asset name was not set. Skipping asset document query." diff --git a/client/ayon_core/hooks/pre_ocio_hook.py b/client/ayon_core/hooks/pre_ocio_hook.py index 00ba9a3bcb..08d9563975 100644 --- a/client/ayon_core/hooks/pre_ocio_hook.py +++ b/client/ayon_core/hooks/pre_ocio_hook.py @@ -19,6 +19,7 @@ class OCIOEnvHook(PreLaunchHook): "nuke", "hiero", "resolve", + "openrv" } launch_types = set() @@ -27,10 +28,10 @@ class OCIOEnvHook(PreLaunchHook): template_data = get_template_data_with_names( project_name=self.data["project_name"], - asset_name=self.data["asset_name"], + asset_name=self.data["folder_path"], task_name=self.data["task_name"], host_name=self.host_name, - system_settings=self.data["system_settings"] + settings=self.data["project_settings"] ) config_data = get_imageio_config( diff --git a/client/ayon_core/host/dirmap.py b/client/ayon_core/host/dirmap.py index cecd689a4c..effafb6261 100644 --- a/client/ayon_core/host/dirmap.py +++ b/client/ayon_core/host/dirmap.py @@ -92,8 +92,8 @@ class HostDirmap(object): self.on_enable_dirmap() - for k, sp in enumerate(mapping["source-path"]): - dst = mapping["destination-path"][k] + for k, sp in enumerate(mapping["source_path"]): + dst = mapping["destination_path"][k] try: # add trailing slash if missing sp = os.path.join(sp, '') @@ -116,7 +116,7 @@ class HostDirmap(object): continue def get_mappings(self): - """Get translation from source-path to destination-path. + """Get translation from source_path to destination_path. It checks if Site Sync is enabled and user chose to use local site, in that case configuration in Local Settings takes precedence @@ -138,8 +138,8 @@ class HostDirmap(object): if ( not mapping - or not mapping.get("destination-path") - or not mapping.get("source-path") + or not mapping.get("destination_path") + or not mapping.get("source_path") ): return {} self.log.info("Processing directory mapping ...") @@ -154,7 +154,7 @@ class HostDirmap(object): in Local Settings. Returns: - dict : { "source-path": [XXX], "destination-path": [YYYY]} + dict : { "source_path": [XXX], "destination_path": [YYYY]} """ project_name = self.project_name @@ -181,6 +181,10 @@ class HostDirmap(object): exclude_locals=False, cached=False) + # TODO implement + # Dirmap is dependent on 'get_site_local_overrides' which + # is not implemented in AYON. The mapping should be received + # from sitesync addon. 
active_overrides = get_site_local_overrides( project_name, active_site) remote_overrides = get_site_local_overrides( @@ -210,13 +214,13 @@ class HostDirmap(object): continue if os.path.isdir(active_site_dir): - if "destination-path" not in mapping: - mapping["destination-path"] = [] - mapping["destination-path"].append(active_site_dir) + if "destination_path" not in mapping: + mapping["destination_path"] = [] + mapping["destination_path"].append(active_site_dir) - if "source-path" not in mapping: - mapping["source-path"] = [] - mapping["source-path"].append(remote_site_dir) + if "source_path" not in mapping: + mapping["source_path"] = [] + mapping["source_path"].append(remote_site_dir) self.log.debug("local sync mapping:: {}".format(mapping)) return mapping diff --git a/client/ayon_core/host/host.py b/client/ayon_core/host/host.py index 6d129e18d9..f79c22824b 100644 --- a/client/ayon_core/host/host.py +++ b/client/ayon_core/host/host.py @@ -49,7 +49,6 @@ class HostBase(object): Todo: - move content of 'install_host' as method of this class - register host object - - install legacy_io - install global plugin paths - store registered plugin paths to this object - handle current context (project, asset, task) @@ -107,7 +106,7 @@ class HostBase(object): Union[str, None]: Current project name. """ - return os.environ.get("AVALON_PROJECT") + return os.environ.get("AYON_PROJECT_NAME") def get_current_asset_name(self): """ @@ -115,7 +114,7 @@ class HostBase(object): Union[str, None]: Current asset name. """ - return os.environ.get("AVALON_ASSET") + return os.environ.get("AYON_FOLDER_PATH") def get_current_task_name(self): """ @@ -123,7 +122,7 @@ class HostBase(object): Union[str, None]: Current task name. """ - return os.environ.get("AVALON_TASK") + return os.environ.get("AYON_TASK_NAME") def get_current_context(self): """Get current context information. @@ -133,16 +132,14 @@ class HostBase(object): can be opened multiple workfiles at one moment and change of context can't be caught properly. - Default implementation returns values from 'legacy_io.Session'. - Returns: Dict[str, Union[str, None]]: Context with 3 keys 'project_name', - 'asset_name' and 'task_name'. All of them can be 'None'. + 'folder_path' and 'task_name'. All of them can be 'None'. """ return { "project_name": self.get_current_project_name(), - "asset_name": self.get_current_asset_name(), + "folder_path": self.get_current_asset_name(), "task_name": self.get_current_task_name() } @@ -164,7 +161,7 @@ class HostBase(object): # Use current context to fill the context title current_context = self.get_current_context() project_name = current_context["project_name"] - asset_name = current_context["asset_name"] + asset_name = current_context["folder_path"] task_name = current_context["task_name"] items = [] if project_name: diff --git a/client/ayon_core/host/interfaces.py b/client/ayon_core/host/interfaces.py index 7c6057acf0..7157ad6f7e 100644 --- a/client/ayon_core/host/interfaces.py +++ b/client/ayon_core/host/interfaces.py @@ -234,7 +234,7 @@ class IWorkfileHost: str: Path to new workdir. 
""" - return session["AVALON_WORKDIR"] + return session["AYON_WORKDIR"] # --- Deprecated method names --- def file_extensions(self): diff --git a/client/ayon_core/hosts/aftereffects/addon.py b/client/ayon_core/hosts/aftereffects/addon.py index 278f836a72..46d0818247 100644 --- a/client/ayon_core/hosts/aftereffects/addon.py +++ b/client/ayon_core/hosts/aftereffects/addon.py @@ -1,13 +1,10 @@ -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon -class AfterEffectsAddon(OpenPypeModule, IHostAddon): +class AfterEffectsAddon(AYONAddon, IHostAddon): name = "aftereffects" host_name = "aftereffects" - def initialize(self, module_settings): - self.enabled = True - def add_implementation_envs(self, env, _app): """Modify environments to contain all required for implementation.""" defaults = { diff --git a/client/ayon_core/hosts/aftereffects/api/launch_logic.py b/client/ayon_core/hosts/aftereffects/api/launch_logic.py index ad521c2f01..0d1a6cf585 100644 --- a/client/ayon_core/hosts/aftereffects/api/launch_logic.py +++ b/client/ayon_core/hosts/aftereffects/api/launch_logic.py @@ -15,9 +15,8 @@ from wsrpc_aiohttp import ( from qtpy import QtCore -from ayon_core.lib import Logger -from ayon_core.tests.lib import is_in_tests -from ayon_core.pipeline import install_host, legacy_io +from ayon_core.lib import Logger, is_in_tests +from ayon_core.pipeline import install_host from ayon_core.addon import AddonsManager from ayon_core.tools.utils import host_tools, get_ayon_qt_app from ayon_core.tools.adobe_webserver.app import WebServerTool @@ -298,14 +297,11 @@ class AfterEffectsRoute(WebSocketRoute): log.info("Setting context change") log.info("project {} asset {} ".format(project, asset)) if project: - legacy_io.Session["AVALON_PROJECT"] = project - os.environ["AVALON_PROJECT"] = project + os.environ["AYON_PROJECT_NAME"] = project if asset: - legacy_io.Session["AVALON_ASSET"] = asset - os.environ["AVALON_ASSET"] = asset + os.environ["AYON_FOLDER_PATH"] = asset if task: - legacy_io.Session["AVALON_TASK"] = task - os.environ["AVALON_TASK"] = task + os.environ["AYON_TASK_NAME"] = task async def read(self): log.debug("aftereffects.read client calls server server calls " diff --git a/client/ayon_core/hosts/aftereffects/api/lib.py b/client/ayon_core/hosts/aftereffects/api/lib.py index f5f2d98698..0a2ee7b7ac 100644 --- a/client/ayon_core/hosts/aftereffects/api/lib.py +++ b/client/ayon_core/hosts/aftereffects/api/lib.py @@ -128,7 +128,7 @@ def set_settings(frames, resolution, comp_ids=None, print_msg=True): current_context = get_current_context() asset_doc = get_asset_by_name(current_context["project_name"], - current_context["asset_name"]) + current_context["folder_path"]) settings = get_asset_settings(asset_doc) msg = '' diff --git a/client/ayon_core/hosts/aftereffects/api/pipeline.py b/client/ayon_core/hosts/aftereffects/api/pipeline.py index 32e064d8cb..7ed244fd1d 100644 --- a/client/ayon_core/hosts/aftereffects/api/pipeline.py +++ b/client/ayon_core/hosts/aftereffects/api/pipeline.py @@ -9,6 +9,8 @@ from ayon_core.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, AVALON_CONTAINER_ID, + AVALON_INSTANCE_ID, + AYON_INSTANCE_ID, ) from ayon_core.hosts.aftereffects.api.workfile_template_builder import ( AEPlaceholderLoadPlugin, @@ -142,7 +144,9 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): layers_meta = stub.get_metadata() for instance in layers_meta: - if instance.get("id") == "pyblish.avalon.instance": + 
if instance.get("id") in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: instances.append(instance) return instances diff --git a/client/ayon_core/hosts/aftereffects/plugins/create/create_render.py b/client/ayon_core/hosts/aftereffects/plugins/create/create_render.py index 78aa49a562..93aec33222 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/create/create_render.py +++ b/client/ayon_core/hosts/aftereffects/plugins/create/create_render.py @@ -11,7 +11,7 @@ from ayon_core.pipeline import ( from ayon_core.hosts.aftereffects.api.pipeline import cache_and_get_instances from ayon_core.hosts.aftereffects.api.lib import set_settings from ayon_core.lib import prepare_template_data -from ayon_core.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS +from ayon_core.pipeline.create import PRODUCT_NAME_ALLOWED_SYMBOLS class RenderCreator(Creator): @@ -22,7 +22,7 @@ class RenderCreator(Creator): """ identifier = "render" label = "Render" - family = "render" + product_type = "render" description = "Render creator" create_allow_context_change = True @@ -31,7 +31,7 @@ class RenderCreator(Creator): mark_for_review = True force_setting_values = True - def create(self, subset_name_from_ui, data, pre_create_data): + def create(self, product_name, data, pre_create_data): stub = api.get_stub() # only after After Effects is up try: @@ -58,33 +58,37 @@ class RenderCreator(Creator): len(comps) > 1) for comp in comps: composition_name = re.sub( - "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS), + "[^{}]+".format(PRODUCT_NAME_ALLOWED_SYMBOLS), "", comp.name ) if use_composition_name: - if "{composition}" not in subset_name_from_ui.lower(): - subset_name_from_ui += "{Composition}" + if "{composition}" not in product_name.lower(): + product_name += "{Composition}" dynamic_fill = prepare_template_data({"composition": composition_name}) - subset_name = subset_name_from_ui.format(**dynamic_fill) + comp_product_name = product_name.format(**dynamic_fill) data["composition_name"] = composition_name else: - subset_name = subset_name_from_ui - subset_name = re.sub(r"\{composition\}", '', subset_name, - flags=re.IGNORECASE) + comp_product_name = re.sub( + r"\{composition\}", + "", + product_name, + flags=re.IGNORECASE + ) for inst in self.create_context.instances: - if subset_name == inst.subset_name: + if comp_product_name == inst.product_name: raise CreatorError("{} already exists".format( - inst.subset_name)) + inst.product_name)) data["members"] = [comp.id] data["orig_comp_name"] = composition_name - new_instance = CreatedInstance(self.family, subset_name, data, - self) + new_instance = CreatedInstance( + self.product_type, comp_product_name, data, self + ) if "farm" in pre_create_data: use_farm = pre_create_data["farm"] new_instance.creator_attributes["farm"] = use_farm @@ -96,7 +100,7 @@ class RenderCreator(Creator): new_instance.data_to_store()) self._add_instance_to_context(new_instance) - stub.rename_item(comp.id, subset_name) + stub.rename_item(comp.id, comp_product_name) if self.force_setting_values: set_settings(True, True, [comp.id], print_msg=False) @@ -107,7 +111,7 @@ class RenderCreator(Creator): "selected by default.", default=True, label="Use selection"), BoolDef("use_composition_name", - label="Use composition name in subset"), + label="Use composition name in product"), UISeparatorDef(), BoolDef("farm", label="Render on farm"), BoolDef( @@ -133,9 +137,14 @@ class RenderCreator(Creator): def collect_instances(self): for instance_data in cache_and_get_instances(self): - # legacy instances have 
family=='render' or 'renderLocal', use them - creator_id = (instance_data.get("creator_identifier") or - instance_data.get("family", '').replace("Local", '')) + # legacy instances have product_type=='render' or 'renderLocal', use them + creator_id = instance_data.get("creator_identifier") + if not creator_id: + # NOTE this is for backwards compatibility but probably can be + # removed + creator_id = instance_data.get("family", "") + creator_id = creator_id.replace("Local", "") + if creator_id == self.identifier: instance_data = self._handle_legacy(instance_data) instance = CreatedInstance.from_existing( @@ -147,10 +156,10 @@ for created_inst, _changes in update_list: api.get_stub().imprint(created_inst.get("instance_id"), created_inst.data_to_store()) - subset_change = _changes.get("subset") - if subset_change: + name_change = _changes.get("productName") + if name_change: api.get_stub().rename_item(created_inst.data["members"][0], - subset_change.new_value) + name_change.new_value) def remove_instances(self, instances): """Removes metadata and renames to original comp name if available.""" @@ -183,33 +192,34 @@ def get_detail_description(self): return """Creator for Render instances - Main publishable item in AfterEffects will be of `render` family. + Main publishable item in AfterEffects will be of `render` product type. Result of this item (instance) is picture sequence or video that could be a final delivery product or loaded and used in another DCCs. - Select single composition and create instance of 'render' family or - turn off 'Use selection' to create instance for all compositions. + Select single composition and create instance of 'render' product type + or turn off 'Use selection' to create instance for all compositions. - 'Use composition name in subset' allows to explicitly add composition - name into created subset name. + 'Use composition name in product' allows to explicitly add composition + name into created product name. Position of composition name could be set in - `project_settings/global/tools/creator/subset_name_profiles` with some - form of '{composition}' placeholder. + `project_settings/global/tools/creator/product_name_profiles` with + some form of '{composition}' placeholder. Composition name will be used implicitly if multiple composition should be handled at same time. - If {composition} placeholder is not us 'subset_name_profiles' - composition name will be capitalized and set at the end of subset name - if necessary. + If {composition} placeholder is not used in 'product_name_profiles' + composition name will be capitalized and set at the end of + product name if necessary. If composition name should be used, it will be cleaned up of characters that would cause an issue in published file names.
""" - def get_dynamic_data(self, variant, task_name, asset_doc, - project_name, host_name, instance): + def get_dynamic_data( + self, project_name, asset_doc, task_name, variant, host_name, instance + ): dynamic_data = {} if instance is not None: composition_name = instance.get("composition_name") @@ -234,9 +244,9 @@ class RenderCreator(Creator): instance_data["task"] = self.create_context.get_current_task_name() if not instance_data.get("creator_attributes"): - is_old_farm = instance_data["family"] != "renderLocal" + is_old_farm = instance_data.get("family") != "renderLocal" instance_data["creator_attributes"] = {"farm": is_old_farm} - instance_data["family"] = self.family + instance_data["productType"] = self.product_type if instance_data["creator_attributes"].get("mark_for_review") is None: instance_data["creator_attributes"]["mark_for_review"] = True diff --git a/client/ayon_core/hosts/aftereffects/plugins/create/workfile_creator.py b/client/ayon_core/hosts/aftereffects/plugins/create/workfile_creator.py index 49f965800d..282e06d0bf 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/create/workfile_creator.py +++ b/client/ayon_core/hosts/aftereffects/plugins/create/workfile_creator.py @@ -9,7 +9,7 @@ from ayon_core.hosts.aftereffects.api.pipeline import cache_and_get_instances class AEWorkfileCreator(AutoCreator): identifier = "workfile" - family = "workfile" + product_type = "workfile" default_variant = "Main" @@ -20,9 +20,9 @@ class AEWorkfileCreator(AutoCreator): for instance_data in cache_and_get_instances(self): creator_id = instance_data.get("creator_identifier") if creator_id == self.identifier: - subset_name = instance_data["subset"] + product_name = instance_data["productName"] instance = CreatedInstance( - self.family, subset_name, instance_data, self + self.product_type, product_name, instance_data, self ) self._add_instance_to_context(instance) @@ -33,7 +33,7 @@ class AEWorkfileCreator(AutoCreator): def create(self, options=None): existing_instance = None for instance in self.create_context.instances: - if instance.family == self.family: + if instance.product_type == self.product_type: existing_instance = instance break @@ -49,9 +49,12 @@ class AEWorkfileCreator(AutoCreator): if existing_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - self.default_variant, task_name, asset_doc, - project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + self.default_variant, + host_name, ) data = { "folderPath": asset_name, @@ -59,12 +62,16 @@ class AEWorkfileCreator(AutoCreator): "variant": self.default_variant, } data.update(self.get_dynamic_data( - self.default_variant, task_name, asset_doc, - project_name, host_name, None + project_name, + asset_doc, + task_name, + self.default_variant, + host_name, + None, )) new_instance = CreatedInstance( - self.family, subset_name, data, self + self.product_type, product_name, data, self ) self._add_instance_to_context(new_instance) @@ -76,10 +83,13 @@ class AEWorkfileCreator(AutoCreator): or existing_instance["task"] != task_name ): asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - self.default_variant, task_name, asset_doc, - project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + self.default_variant, + host_name, ) existing_instance["folderPath"] = asset_name existing_instance["task"] = task_name - existing_instance["subset"] = 
subset_name + existing_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/aftereffects/plugins/load/load_background.py b/client/ayon_core/hosts/aftereffects/plugins/load/load_background.py index f23d7ec0bd..b834875e89 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/load/load_background.py +++ b/client/ayon_core/hosts/aftereffects/plugins/load/load_background.py @@ -11,7 +11,7 @@ from ayon_core.hosts.aftereffects.api.lib import ( class BackgroundLoader(api.AfterEffectsLoader): """ - Load images from Background family + Load images from Background product type Creates for each background separate folder with all imported images from background json AND automatically created composition with layers, each layer for separate image. @@ -56,16 +56,21 @@ class BackgroundLoader(api.AfterEffectsLoader): self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): """ Switch asset or change version """ stub = self.get_stub() - context = representation.get("context", {}) + asset_doc = context["asset"] + subset_doc = context["subset"] + repre_doc = context["representation"] + + folder_name = asset_doc["name"] + product_name = subset_doc["name"] _ = container.pop("layer") # without iterator number (_001, 002...) namespace_from_container = re.sub(r'_\d{3}$', '', container["namespace"]) - comp_name = "{}_{}".format(context["asset"], context["subset"]) + comp_name = "{}_{}".format(folder_name, product_name) # switching assets if namespace_from_container != comp_name: @@ -73,11 +78,11 @@ class BackgroundLoader(api.AfterEffectsLoader): existing_items = [layer.name for layer in items] comp_name = get_unique_layer_name( existing_items, - "{}_{}".format(context["asset"], context["subset"])) + "{}_{}".format(folder_name, product_name)) else: # switching version - keep same name comp_name = container["namespace"] - path = get_representation_path(representation) + path = get_representation_path(repre_doc) layers = get_background_layers(path) comp = stub.reload_background(container["members"][1], @@ -85,8 +90,8 @@ class BackgroundLoader(api.AfterEffectsLoader): layers) # update container - container["representation"] = str(representation["_id"]) - container["name"] = context["subset"] + container["representation"] = str(repre_doc["_id"]) + container["name"] = product_name container["namespace"] = comp_name container["members"] = comp.members @@ -104,5 +109,5 @@ class BackgroundLoader(api.AfterEffectsLoader): stub.imprint(layer.id, {}) stub.delete_item(layer.id) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/aftereffects/plugins/load/load_file.py b/client/ayon_core/hosts/aftereffects/plugins/load/load_file.py index a8e67e9f88..bceea66e8e 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/load/load_file.py +++ b/client/ayon_core/hosts/aftereffects/plugins/load/load_file.py @@ -64,31 +64,36 @@ class FileLoader(api.AfterEffectsLoader): self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): """ Switch asset or change version """ stub = self.get_stub() layer = container.pop("layer") - context = representation.get("context", {}) + asset_doc = context["asset"] + subset_doc = context["subset"] + repre_doc = context["representation"] + + folder_name = asset_doc["name"] + product_name = subset_doc["name"] namespace_from_container = 
re.sub(r'_\d{3}$', '', container["namespace"]) - layer_name = "{}_{}".format(context["asset"], context["subset"]) + layer_name = "{}_{}".format(folder_name, product_name) # switching assets if namespace_from_container != layer_name: layers = stub.get_items(comps=True) existing_layers = [layer.name for layer in layers] layer_name = get_unique_layer_name( existing_layers, - "{}_{}".format(context["asset"], context["subset"])) + "{}_{}".format(folder_name, product_name)) else: # switching version - keep same name layer_name = container["namespace"] - path = get_representation_path(representation) + path = get_representation_path(repre_doc) # with aftereffects.maintained_selection(): # TODO stub.replace_item(layer.id, path, stub.LOADED_ICON + layer_name) stub.imprint( - layer.id, {"representation": str(representation["_id"]), - "name": context["subset"], + layer.id, {"representation": str(repre_doc["_id"]), + "name": product_name, "namespace": layer_name} ) @@ -103,5 +108,5 @@ class FileLoader(api.AfterEffectsLoader): stub.imprint(layer.id, {}) stub.delete_item(layer.id) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py index a8a316ea80..afd58ca758 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py @@ -60,8 +60,8 @@ class CollectAERender(publish.AbstractCollectRender): if not inst.data.get("active", True): continue - family = inst.data["family"] - if family not in ["render", "renderLocal"]: # legacy + product_type = inst.data["productType"] + if product_type not in ["render", "renderLocal"]: # legacy continue comp_id = int(inst.data["members"][0]) @@ -81,29 +81,32 @@ class CollectAERender(publish.AbstractCollectRender): fps = comp_info.frameRate # TODO add resolution when supported by extension - task_name = inst.data.get("task") # legacy + task_name = inst.data.get("task") render_q = CollectAERender.get_stub().get_render_info(comp_id) if not render_q: raise ValueError("No file extension set in Render Queue") render_item = render_q[0] + product_type = "render" instance_families = inst.data.get("families", []) - subset_name = inst.data["subset"] + instance_families.append(product_type) + product_name = inst.data["productName"] instance = AERenderInstance( - family="render", + productType=product_type, + family=product_type, families=instance_families, version=version, time="", source=current_file, - label="{} - {}".format(subset_name, family), - subset=subset_name, - asset=inst.data["asset"], + label="{} - {}".format(product_name, product_type), + productName=product_name, + folderPath=inst.data["folderPath"], task=task_name, attachTo=False, setMembers='', publish=True, - name=subset_name, + name=product_name, resolutionWidth=render_item.width, resolutionHeight=render_item.height, pixelAspect=1, @@ -175,8 +178,8 @@ class CollectAERender(publish.AbstractCollectRender): version_str = "v{:03d}".format(render_instance.version) if "#" not in file_name: # single frame (mov)W path = os.path.join(base_dir, "{}_{}_{}.{}".format( - render_instance.asset, - render_instance.subset, + render_instance.folderPath, + render_instance.productName, version_str, ext )) @@ -184,8 +187,8 @@ class CollectAERender(publish.AbstractCollectRender): 
else: for frame in range(start, end + 1): path = os.path.join(base_dir, "{}_{}_{}.{}.{}".format( - render_instance.asset, - render_instance.subset, + render_instance.folderPath, + render_instance.productName, version_str, str(frame).zfill(self.padding_width), ext diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_review.py b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_review.py index a933b9fed2..667e9cf8b9 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_review.py +++ b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_review.py @@ -3,7 +3,7 @@ Requires: None Provides: - instance -> family ("review") + instance -> families ("review") """ import pyblish.api diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_workfile.py b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_workfile.py index 538d646ab4..cd8e102500 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -2,9 +2,6 @@ import os import pyblish.api -from ayon_core.client import get_asset_name_identifier -from ayon_core.pipeline.create import get_subset_name - class CollectWorkfile(pyblish.api.ContextPlugin): """ Adds the AE render instances """ @@ -15,86 +12,24 @@ class CollectWorkfile(pyblish.api.ContextPlugin): default_variant = "Main" def process(self, context): - existing_instance = None + workfile_instance = None for instance in context: - if instance.data["family"] == "workfile": - self.log.debug("Workfile instance found, won't create new") - existing_instance = instance + if instance.data["productType"] == "workfile": + self.log.debug("Workfile instance found") + workfile_instance = instance break current_file = context.data["currentFile"] staging_dir = os.path.dirname(current_file) scene_file = os.path.basename(current_file) - if existing_instance is None: # old publish - instance = self._get_new_instance(context, scene_file) - else: - instance = existing_instance + if workfile_instance is None: + self.log.debug("Workfile instance not found. 
Skipping") + return # creating representation - representation = { - 'name': 'aep', - 'ext': 'aep', - 'files': scene_file, + workfile_instance.data["representations"].append({ + "name": "aep", + "ext": "aep", + "files": scene_file, "stagingDir": staging_dir, - } - - if not instance.data.get("representations"): - instance.data["representations"] = [] - instance.data["representations"].append(representation) - - instance.data["publish"] = instance.data["active"] # for DL - - def _get_new_instance(self, context, scene_file): - task = context.data["task"] - version = context.data["version"] - asset_entity = context.data["assetEntity"] - project_entity = context.data["projectEntity"] - - asset_name = get_asset_name_identifier(asset_entity) - - instance_data = { - "active": True, - "asset": asset_name, - "task": task, - "frameStart": context.data['frameStart'], - "frameEnd": context.data['frameEnd'], - "handleStart": context.data['handleStart'], - "handleEnd": context.data['handleEnd'], - "fps": asset_entity["data"]["fps"], - "resolutionWidth": asset_entity["data"].get( - "resolutionWidth", - project_entity["data"]["resolutionWidth"]), - "resolutionHeight": asset_entity["data"].get( - "resolutionHeight", - project_entity["data"]["resolutionHeight"]), - "pixelAspect": 1, - "step": 1, - "version": version - } - - # workfile instance - family = "workfile" - subset = get_subset_name( - family, - self.default_variant, - context.data["anatomyData"]["task"]["name"], - context.data["assetEntity"], - context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"], - project_settings=context.data["project_settings"] - ) - # Create instance - instance = context.create_instance(subset) - - # creating instance data - instance.data.update({ - "subset": subset, - "label": scene_file, - "family": family, - "families": [family], - "representations": list() }) - - instance.data.update(instance_data) - - return instance diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/help/validate_instance_asset.xml b/client/ayon_core/hosts/aftereffects/plugins/publish/help/validate_instance_asset.xml index edf62a5141..d89a851c64 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/publish/help/validate_instance_asset.xml +++ b/client/ayon_core/hosts/aftereffects/plugins/publish/help/validate_instance_asset.xml @@ -3,9 +3,9 @@ Subset context -## Invalid subset context +## Invalid product context -Context of the given subset doesn't match your current scene. +Context of the given product doesn't match your current scene. ### How to repair? @@ -15,7 +15,7 @@ You can fix this with "repair" button on the right and refresh Publish at the bo ### __Detailed Info__ (optional) This might happen if you are reuse old workfile and open it in different context. -(Eg. you created subset "renderCompositingDefault" from asset "Robot' in "your_project_Robot_compositing.aep", now you opened this workfile in a context "Sloth" but existing subset for "Robot" asset stayed in the workfile.) +(Eg. you created product name "renderCompositingDefault" from folder "Robot' in "your_project_Robot_compositing.aep", now you opened this workfile in a context "Sloth" but existing product for "Robot" asset stayed in the workfile.) 
\ No newline at end of file diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/pre_collect_render.py b/client/ayon_core/hosts/aftereffects/plugins/publish/pre_collect_render.py deleted file mode 100644 index de3c935dff..0000000000 --- a/client/ayon_core/hosts/aftereffects/plugins/publish/pre_collect_render.py +++ /dev/null @@ -1,54 +0,0 @@ -import json -import pyblish.api -from ayon_core.hosts.aftereffects.api import AfterEffectsHost - - -class PreCollectRender(pyblish.api.ContextPlugin): - """ - Checks if render instance is of old type, adds to families to both - existing collectors work same way. - - Could be removed in the future when no one uses old publish. - """ - - label = "PreCollect Render" - order = pyblish.api.CollectorOrder + 0.400 - hosts = ["aftereffects"] - - family_remapping = { - "render": ("render.farm", "farm"), # (family, label) - "renderLocal": ("render.local", "local") - } - - def process(self, context): - if context.data.get("newPublishing"): - self.log.debug("Not applicable for New Publisher, skip") - return - - for inst in AfterEffectsHost().list_instances(): - if inst.get("creator_attributes"): - raise ValueError("Instance created in New publisher, " - "cannot be published in Pyblish.\n" - "Please publish in New Publisher " - "or recreate instances with legacy Creators") - - if inst["family"] not in self.family_remapping.keys(): - continue - - if not inst["members"]: - raise ValueError("Couldn't find id, unable to publish. " + - "Please recreate instance.") - - instance = context.create_instance(inst["subset"]) - inst["families"] = [self.family_remapping[inst["family"]][0]] - instance.data.update(inst) - - self._debug_log(instance) - - def _debug_log(self, instance): - def _default_json(value): - return str(value) - - self.log.info( - json.dumps(instance.data, indent=4, default=_default_json) - ) diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/validate_instance_asset.py b/client/ayon_core/hosts/aftereffects/plugins/publish/validate_instance_asset.py index c3938ecbda..e8f2e29a2f 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/publish/validate_instance_asset.py +++ b/client/ayon_core/hosts/aftereffects/plugins/publish/validate_instance_asset.py @@ -30,7 +30,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action): for instance in instances: data = stub.read(instance[0]) - data["asset"] = get_current_asset_name() + data["folderPath"] = get_current_asset_name() stub.imprint(instance[0].instance_id, data) @@ -53,7 +53,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin): order = ValidateContentsOrder def process(self, instance): - instance_asset = instance.data["asset"] + instance_asset = instance.data["folderPath"] current_asset = get_current_asset_name() msg = ( f"Instance asset {instance_asset} is not the same " diff --git a/client/ayon_core/hosts/blender/addon.py b/client/ayon_core/hosts/blender/addon.py index c3804382e5..b7484de243 100644 --- a/client/ayon_core/hosts/blender/addon.py +++ b/client/ayon_core/hosts/blender/addon.py @@ -1,16 +1,13 @@ import os -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon BLENDER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class BlenderAddon(OpenPypeModule, IHostAddon): +class BlenderAddon(AYONAddon, IHostAddon): name = "blender" host_name = "blender" - def initialize(self, module_settings): - self.enabled = True - def add_implementation_envs(self, env, _app): """Modify environments to contain all required for 
implementation.""" # Prepare path to implementation script diff --git a/client/ayon_core/hosts/blender/api/pipeline.py b/client/ayon_core/hosts/blender/api/pipeline.py index 6801b1f71b..fcac285f74 100644 --- a/client/ayon_core/hosts/blender/api/pipeline.py +++ b/client/ayon_core/hosts/blender/api/pipeline.py @@ -19,7 +19,6 @@ from ayon_core.host import ( from ayon_core.client import get_asset_by_name from ayon_core.pipeline import ( schema, - legacy_io, get_current_project_name, get_current_asset_name, register_loader_plugin_path, @@ -27,6 +26,7 @@ from ayon_core.pipeline import ( deregister_loader_plugin_path, deregister_creator_plugin_path, AVALON_CONTAINER_ID, + AYON_CONTAINER_ID, ) from ayon_core.lib import ( Logger, @@ -273,7 +273,7 @@ def set_resolution(data): def on_new(): - project = os.environ.get("AVALON_PROJECT") + project = os.environ.get("AYON_PROJECT_NAME") settings = get_project_settings(project).get("blender") set_resolution_startup = settings.get("set_resolution_startup") @@ -294,7 +294,7 @@ def on_new(): def on_open(): - project = os.environ.get("AVALON_PROJECT") + project = os.environ.get("AYON_PROJECT_NAME") settings = get_project_settings(project).get("blender") set_resolution_startup = settings.get("set_resolution_startup") @@ -380,7 +380,7 @@ def _on_task_changed(): # `directory` attribute, so it opens in that directory (does it?). # https://docs.blender.org/api/blender2.8/bpy.types.Operator.html#calling-a-file-selector # https://docs.blender.org/api/blender2.8/bpy.types.WindowManager.html#bpy.types.WindowManager.fileselect_add - workdir = legacy_io.Session["AVALON_WORKDIR"] + workdir = os.getenv("AYON_WORKDIR") log.debug("New working directory: %s", workdir) @@ -564,8 +564,9 @@ def ls() -> Iterator: called containers. """ - for container in lib.lsattr("id", AVALON_CONTAINER_ID): - yield parse_container(container) + for id_type in {AYON_CONTAINER_ID, AVALON_CONTAINER_ID}: + for container in lib.lsattr("id", id_type): + yield parse_container(container) def publish(): diff --git a/client/ayon_core/hosts/blender/api/plugin.py b/client/ayon_core/hosts/blender/api/plugin.py index 2cd8d1f291..4b45d8ffa3 100644 --- a/client/ayon_core/hosts/blender/api/plugin.py +++ b/client/ayon_core/hosts/blender/api/plugin.py @@ -10,6 +10,8 @@ from ayon_core.pipeline import ( Creator, CreatedInstance, LoaderPlugin, + AVALON_INSTANCE_ID, + AYON_INSTANCE_ID, ) from ayon_core.lib import BoolDef @@ -28,13 +30,13 @@ VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx"] def prepare_scene_name( - asset: str, subset: str, namespace: Optional[str] = None + folder_name: str, product_name: str, namespace: Optional[str] = None ) -> str: """Return a consistent name for an asset.""" - name = f"{asset}" + name = f"{folder_name}" if namespace: name = f"{name}_{namespace}" - name = f"{name}_{subset}" + name = f"{name}_{product_name}" # Blender name for a collection or object cannot be longer than 63 # characters. If the name is longer, it will raise an error. 
@@ -45,7 +47,7 @@ def prepare_scene_name( def get_unique_number( - asset: str, subset: str + folder_name: str, product_name: str ) -> str: """Return a unique number based on the asset name.""" avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) @@ -62,10 +64,10 @@ if c.get(AVALON_PROPERTY)} container_names = obj_group_names.union(coll_group_names) count = 1 - name = f"{asset}_{count:0>2}_{subset}" + name = f"{folder_name}_{count:0>2}_{product_name}" while name in container_names: count += 1 - name = f"{asset}_{count:0>2}_{subset}" + name = f"{folder_name}_{count:0>2}_{product_name}" return f"{count:0>2}" @@ -159,24 +161,22 @@ class BaseCreator(Creator): create_as_asset_group = False @staticmethod - def cache_subsets(shared_data): + def cache_instance_data(shared_data): """Cache instances for Creators shared data. - Create `blender_cached_subsets` key when needed in shared data and + Create `blender_cached_instances` key when needed in shared data and fill it with all collected instances from the scene under its respective creator identifiers. If legacy instances are detected in the scene, create - `blender_cached_legacy_subsets` key and fill it with - all legacy subsets from this family as a value. # key or value? + `blender_cached_legacy_instances` key and fill it with + all legacy products from this family as a value. # key or value? Args: shared_data(Dict[str, Any]): Shared data. - Return: - Dict[str, Any]: Shared data with cached subsets. """ - if not shared_data.get('blender_cached_subsets'): + if not shared_data.get('blender_cached_instances'): cache = {} cache_legacy = {} @@ -193,7 +193,9 @@ if not avalon_prop: continue - if avalon_prop.get('id') != 'pyblish.avalon.instance': + if avalon_prop.get('id') not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: continue creator_id = avalon_prop.get('creator_identifier') @@ -206,19 +208,19 @@ # Legacy creator instance cache_legacy.setdefault(family, []).append(obj_or_col) - shared_data["blender_cached_subsets"] = cache - shared_data["blender_cached_legacy_subsets"] = cache_legacy + shared_data["blender_cached_instances"] = cache + shared_data["blender_cached_legacy_instances"] = cache_legacy return shared_data def create( - self, subset_name: str, instance_data: dict, pre_create_data: dict + self, product_name: str, instance_data: dict, pre_create_data: dict ): """Override abstract method from Creator. Create new instance and store it. Args: - subset_name(str): Subset name of created instance. + product_name(str): Product name of created instance. instance_data(dict): Instance base data. pre_create_data(dict): Data based on pre creation attributes. Those may affect how creator works.
@@ -232,7 +234,7 @@ # Create asset group asset_name = instance_data["folderPath"].split("/")[-1] - name = prepare_scene_name(asset_name, subset_name) + name = prepare_scene_name(asset_name, product_name) if self.create_as_asset_group: # Create instance as empty instance_node = bpy.data.objects.new(name=name, object_data=None) @@ -243,10 +245,10 @@ instance_node = bpy.data.collections.new(name=name) instances.children.link(instance_node) - self.set_instance_data(subset_name, instance_data) + self.set_instance_data(product_name, instance_data) instance = CreatedInstance( - self.family, subset_name, instance_data, self + self.product_type, product_name, instance_data, self ) instance.transient_data["instance_node"] = instance_node self._add_instance_to_context(instance) @@ -259,18 +261,18 @@ """Override abstract method from BaseCreator. Collect existing instances related to this creator plugin.""" - # Cache subsets in shared data - self.cache_subsets(self.collection_shared_data) + # Cache instances in shared data + self.cache_instance_data(self.collection_shared_data) - # Get cached subsets - cached_subsets = self.collection_shared_data.get( - "blender_cached_subsets" + # Get cached instances + cached_instances = self.collection_shared_data.get( + "blender_cached_instances" ) - if not cached_subsets: + if not cached_instances: return # Process only instances that were created by this creator - for instance_node in cached_subsets.get(self.identifier, []): + for instance_node in cached_instances.get(self.identifier, []): property = instance_node.get(AVALON_PROPERTY) # Create instance object from existing data instance = CreatedInstance.from_existing( @@ -302,16 +304,17 @@ ) return - # Rename the instance node in the scene if subset or asset changed. + # Rename the instance node in the scene if product + # or folder changed. # Do not rename the instance if the family is workfile, as the # workfile instance is included in the AVALON_CONTAINER collection. if ( - "subset" in changes.changed_keys + "productName" in changes.changed_keys or "folderPath" in changes.changed_keys - ) and created_instance.family != "workfile": + ) and created_instance.product_type != "workfile": asset_name = data["folderPath"].split("/")[-1] name = prepare_scene_name( - asset=asset_name, subset=data["subset"] + asset_name, data["productName"] ) node.name = name @@ -337,13 +340,13 @@ def set_instance_data( self, - subset_name: str, + product_name: str, instance_data: dict ): """Fill instance data with required items. Args: - subset_name(str): Subset name of created instance. + product_name(str): Product name of created instance. instance_data(dict): Instance base data. instance_node(bpy.types.ID): Instance node in blender scene. """ @@ -352,9 +355,9 @@ instance_data.update( { - "id": "pyblish.avalon.instance", + "id": AVALON_INSTANCE_ID, "creator_identifier": self.identifier, - "subset": subset_name, + "productName": product_name, } ) @@ -462,14 +465,14 @@ filepath = self.filepath_from_context(context) assert Path(filepath).exists(), f"{filepath} doesn't exist."
- asset = context["asset"]["name"] - subset = context["subset"]["name"] + folder_name = context["asset"]["name"] + product_name = context["subset"]["name"] unique_number = get_unique_number( - asset, subset + folder_name, product_name ) - namespace = namespace or f"{asset}_{unique_number}" + namespace = namespace or f"{folder_name}_{unique_number}" name = name or prepare_scene_name( - asset, subset, unique_number + folder_name, product_name, unique_number ) nodes = self.process_asset( @@ -495,21 +498,21 @@ class AssetLoader(LoaderPlugin): # loader=self.__class__.__name__, # ) - # asset = context["asset"]["name"] - # subset = context["subset"]["name"] + # folder_name = context["asset"]["name"] + # product_name = context["subset"]["name"] # instance_name = prepare_scene_name( - # asset, subset, unique_number + # folder_name, product_name, unique_number # ) + '_CON' # return self._get_instance_collection(instance_name, nodes) - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Must be implemented by a sub-class""" raise NotImplementedError("Must be implemented by a sub-class") - def update(self, container: Dict, representation: Dict): + def update(self, container: Dict, context: Dict): """ Run the update on Blender main thread""" - mti = MainThreadItem(self.exec_update, container, representation) + mti = MainThreadItem(self.exec_update, container, context) execute_in_main_thread(mti) def exec_remove(self, container: Dict) -> bool: diff --git a/client/ayon_core/hosts/blender/api/render_lib.py b/client/ayon_core/hosts/blender/api/render_lib.py index 35aa1f12c9..91913f7913 100644 --- a/client/ayon_core/hosts/blender/api/render_lib.py +++ b/client/ayon_core/hosts/blender/api/render_lib.py @@ -47,6 +47,22 @@ def get_multilayer(settings): ["multilayer_exr"]) +def get_renderer(settings): + """Get renderer from blender settings.""" + + return (settings["blender"] + ["RenderSettings"] + ["renderer"]) + + +def get_compositing(settings): + """Get compositing from blender settings.""" + + return (settings["blender"] + ["RenderSettings"] + ["compositing"]) + + def get_render_product(output_path, name, aov_sep): """ Generate the path to the render product. 
Blender interprets the `#` @@ -91,66 +107,120 @@ def set_render_format(ext, multilayer): image_settings.file_format = "TIFF" -def set_render_passes(settings): - aov_list = (settings["blender"] - ["RenderSettings"] - ["aov_list"]) - - custom_passes = (settings["blender"] - ["RenderSettings"] - ["custom_passes"]) +def set_render_passes(settings, renderer): + aov_list = set(settings["blender"]["RenderSettings"]["aov_list"]) + custom_passes = settings["blender"]["RenderSettings"]["custom_passes"] + # Common passes for both renderers vl = bpy.context.view_layer + # Data Passes vl.use_pass_combined = "combined" in aov_list vl.use_pass_z = "z" in aov_list vl.use_pass_mist = "mist" in aov_list vl.use_pass_normal = "normal" in aov_list + + # Light Passes vl.use_pass_diffuse_direct = "diffuse_light" in aov_list vl.use_pass_diffuse_color = "diffuse_color" in aov_list vl.use_pass_glossy_direct = "specular_light" in aov_list vl.use_pass_glossy_color = "specular_color" in aov_list - vl.eevee.use_pass_volume_direct = "volume_light" in aov_list vl.use_pass_emit = "emission" in aov_list vl.use_pass_environment = "environment" in aov_list - vl.use_pass_shadow = "shadow" in aov_list vl.use_pass_ambient_occlusion = "ao" in aov_list - cycles = vl.cycles + # Cryptomatte Passes + vl.use_pass_cryptomatte_object = "cryptomatte_object" in aov_list + vl.use_pass_cryptomatte_material = "cryptomatte_material" in aov_list + vl.use_pass_cryptomatte_asset = "cryptomatte_asset" in aov_list - cycles.denoising_store_passes = "denoising" in aov_list - cycles.use_pass_volume_direct = "volume_direct" in aov_list - cycles.use_pass_volume_indirect = "volume_indirect" in aov_list + if renderer == "BLENDER_EEVEE": + # Eevee exclusive passes + eevee = vl.eevee + + # Light Passes + vl.use_pass_shadow = "shadow" in aov_list + eevee.use_pass_volume_direct = "volume_light" in aov_list + + # Effects Passes + eevee.use_pass_bloom = "bloom" in aov_list + eevee.use_pass_transparent = "transparent" in aov_list + + # Cryptomatte Passes + vl.use_pass_cryptomatte_accurate = "cryptomatte_accurate" in aov_list + elif renderer == "CYCLES": + # Cycles exclusive passes + cycles = vl.cycles + + # Data Passes + vl.use_pass_position = "position" in aov_list + vl.use_pass_vector = "vector" in aov_list + vl.use_pass_uv = "uv" in aov_list + cycles.denoising_store_passes = "denoising" in aov_list + vl.use_pass_object_index = "object_index" in aov_list + vl.use_pass_material_index = "material_index" in aov_list + cycles.pass_debug_sample_count = "sample_count" in aov_list + + # Light Passes + vl.use_pass_diffuse_indirect = "diffuse_indirect" in aov_list + vl.use_pass_glossy_indirect = "specular_indirect" in aov_list + vl.use_pass_transmission_direct = "transmission_direct" in aov_list + vl.use_pass_transmission_indirect = "transmission_indirect" in aov_list + vl.use_pass_transmission_color = "transmission_color" in aov_list + cycles.use_pass_volume_direct = "volume_light" in aov_list + cycles.use_pass_volume_indirect = "volume_indirect" in aov_list + cycles.use_pass_shadow_catcher = "shadow" in aov_list aovs_names = [aov.name for aov in vl.aovs] for cp in custom_passes: - cp_name = cp[0] + cp_name = cp["attribute"] if cp_name not in aovs_names: aov = vl.aovs.add() aov.name = cp_name else: aov = vl.aovs[cp_name] - aov.type = cp[1].get("type", "VALUE") + aov.type = cp["value"] - return aov_list, custom_passes + return list(aov_list), custom_passes -def set_node_tree(output_path, name, aov_sep, ext, multilayer): +def _create_aov_slot(name, aov_sep, slots, 
rpass_name, multi_exr, output_path): + filename = f"{name}{aov_sep}{rpass_name}.####" + slot = slots.new(rpass_name if multi_exr else filename) + filepath = str(output_path / filename.lstrip("/")) + + return slot, filepath + + +def set_node_tree( + output_path, render_product, name, aov_sep, ext, multilayer, compositing +): # Set the scene to use the compositor node tree to render bpy.context.scene.use_nodes = True tree = bpy.context.scene.node_tree - # Get the Render Layers node - rl_node = None + comp_layer_type = "CompositorNodeRLayers" + output_type = "CompositorNodeOutputFile" + compositor_type = "CompositorNodeComposite" + + # Get the Render Layer, Composite and the previous output nodes + render_layer_node = None + composite_node = None + old_output_node = None for node in tree.nodes: - if node.bl_idname == "CompositorNodeRLayers": - rl_node = node + if node.bl_idname == comp_layer_type: + render_layer_node = node + elif node.bl_idname == compositor_type: + composite_node = node + elif node.bl_idname == output_type and "AYON" in node.name: + old_output_node = node + if render_layer_node and composite_node and old_output_node: break # If there's not a Render Layers node, we create it - if not rl_node: - rl_node = tree.nodes.new("CompositorNodeRLayers") + if not render_layer_node: + render_layer_node = tree.nodes.new(comp_layer_type) # Get the enabled output sockets, that are the active passes for the # render. @@ -158,48 +228,81 @@ def set_node_tree(output_path, name, aov_sep, ext, multilayer): exclude_sockets = ["Image", "Alpha", "Noisy Image"] passes = [ socket - for socket in rl_node.outputs + for socket in render_layer_node.outputs if socket.enabled and socket.name not in exclude_sockets ] - # Remove all output nodes - for node in tree.nodes: - if node.bl_idname == "CompositorNodeOutputFile": - tree.nodes.remove(node) - # Create a new output node - output = tree.nodes.new("CompositorNodeOutputFile") + output = tree.nodes.new(output_type) image_settings = bpy.context.scene.render.image_settings output.format.file_format = image_settings.file_format + slots = None + # In case of a multilayer exr, we don't need to use the output node, # because the blender render already outputs a multilayer exr. 
- if ext == "exr" and multilayer: - output.layer_slots.clear() - return [] + multi_exr = ext == "exr" and multilayer + slots = output.layer_slots if multi_exr else output.file_slots + output.base_path = render_product if multi_exr else str(output_path) - output.file_slots.clear() - output.base_path = str(output_path) + slots.clear() aov_file_products = [] + old_links = { + link.from_socket.name: link for link in tree.links + if link.to_node == old_output_node} + + # Create a new socket for the beauty output + pass_name = "rgba" if multi_exr else "beauty" + slot, _ = _create_aov_slot( + name, aov_sep, slots, pass_name, multi_exr, output_path) + tree.links.new(render_layer_node.outputs["Image"], slot) + + if compositing: + # Create a new socket for the composite output + pass_name = "composite" + comp_socket, filepath = _create_aov_slot( + name, aov_sep, slots, pass_name, multi_exr, output_path) + aov_file_products.append(("Composite", filepath)) + # For each active render pass, we add a new socket to the output node # and link it - for render_pass in passes: - filepath = f"{name}{aov_sep}{render_pass.name}.####" + for rpass in passes: + slot, filepath = _create_aov_slot( + name, aov_sep, slots, rpass.name, multi_exr, output_path) + aov_file_products.append((rpass.name, filepath)) - output.file_slots.new(filepath) + # If the rpass was not connected with the old output node, we connect + # it with the new one. + if not old_links.get(rpass.name): + tree.links.new(rpass, slot) - filename = str(output_path / filepath.lstrip("/")) + for link in list(old_links.values()): + # Check if the socket is still available in the new output node. + socket = output.inputs.get(link.to_socket.name) + # If it is, we connect it with the new output node. + if socket: + tree.links.new(link.from_socket, socket) + # Then, we remove the old link. + tree.links.remove(link) - aov_file_products.append((render_pass.name, filename)) + # If there's a composite node, we connect its input with the new output + if compositing and composite_node: + for link in tree.links: + if link.to_node == composite_node: + tree.links.new(link.from_socket, comp_socket) + break - node_input = output.inputs[-1] + if old_output_node: + output.location = old_output_node.location + tree.nodes.remove(old_output_node) - tree.links.new(render_pass, node_input) + output.name = "AYON File Output" + output.label = "AYON File Output" - return aov_file_products + return [] if multi_exr else aov_file_products def imprint_render_settings(node, data): @@ -228,17 +331,23 @@ def prepare_rendering(asset_group): aov_sep = get_aov_separator(settings) ext = get_image_format(settings) multilayer = get_multilayer(settings) + renderer = get_renderer(settings) + compositing = get_compositing(settings) set_render_format(ext, multilayer) - aov_list, custom_passes = set_render_passes(settings) + bpy.context.scene.render.engine = renderer + aov_list, custom_passes = set_render_passes(settings, renderer) output_path = Path.joinpath(dirpath, render_folder, file_name) render_product = get_render_product(output_path, name, aov_sep) aov_file_product = set_node_tree( - output_path, name, aov_sep, ext, multilayer) + output_path, render_product, name, aov_sep, + ext, multilayer, compositing) - bpy.context.scene.render.filepath = render_product + # Clear the render filepath, so that the output is handled only by the + # output node in the compositor. 
+ bpy.context.scene.render.filepath = "" render_settings = { "render_folder": render_folder, diff --git a/client/ayon_core/hosts/blender/api/workio.py b/client/ayon_core/hosts/blender/api/workio.py index a8f6193abc..e0f333843a 100644 --- a/client/ayon_core/hosts/blender/api/workio.py +++ b/client/ayon_core/hosts/blender/api/workio.py @@ -82,7 +82,7 @@ def file_extensions() -> List[str]: def work_root(session: dict) -> str: """Return the default root to browse for work files.""" - work_dir = session["AVALON_WORKDIR"] + work_dir = session["AYON_WORKDIR"] scene_dir = session.get("AVALON_SCENEDIR") if scene_dir: return str(Path(work_dir, scene_dir)) diff --git a/client/ayon_core/hosts/blender/plugins/create/convert_legacy.py b/client/ayon_core/hosts/blender/plugins/create/convert_legacy.py index fcd4a7a26e..65a5a4a9b6 100644 --- a/client/ayon_core/hosts/blender/plugins/create/convert_legacy.py +++ b/client/ayon_core/hosts/blender/plugins/create/convert_legacy.py @@ -1,24 +1,24 @@ # -*- coding: utf-8 -*- -"""Converter for legacy Houdini subsets.""" +"""Converter for legacy Houdini products.""" from ayon_core.pipeline.create.creator_plugins import SubsetConvertorPlugin from ayon_core.hosts.blender.api.lib import imprint class BlenderLegacyConvertor(SubsetConvertorPlugin): - """Find and convert any legacy subsets in the scene. + """Find and convert any legacy products in the scene. - This Converter will find all legacy subsets in the scene and will - transform them to the current system. Since the old subsets doesn't + This Converter will find all legacy products in the scene and will + transform them to the current system. Since the old products doesn't retain any information about their original creators, the only mapping - we can do is based on their families. + we can do is based on their product types. - Its limitation is that you can have multiple creators creating subset - of the same family and there is no way to handle it. This code should - nevertheless cover all creators that came with OpenPype. + Its limitation is that you can have multiple creators creating product + of the same product type and there is no way to handle it. This code + should nevertheless cover all creators that came with OpenPype. """ identifier = "io.openpype.creators.blender.legacy" - family_to_id = { + product_type_to_id = { "action": "io.openpype.creators.blender.action", "camera": "io.openpype.creators.blender.camera", "animation": "io.openpype.creators.blender.animation", @@ -33,42 +33,42 @@ class BlenderLegacyConvertor(SubsetConvertorPlugin): def __init__(self, *args, **kwargs): super(BlenderLegacyConvertor, self).__init__(*args, **kwargs) - self.legacy_subsets = {} + self.legacy_instances = {} def find_instances(self): - """Find legacy subsets in the scene. + """Find legacy products in the scene. - Legacy subsets are the ones that doesn't have `creator_identifier` + Legacy products are the ones that doesn't have `creator_identifier` parameter on them. 
This is using cached entries done in - :py:meth:`~BaseCreator.cache_subsets()` + :py:meth:`~BaseCreator.cache_instance_data()` """ - self.legacy_subsets = self.collection_shared_data.get( - "blender_cached_legacy_subsets") - if not self.legacy_subsets: + self.legacy_instances = self.collection_shared_data.get( + "blender_cached_legacy_instances") + if not self.legacy_instances: return self.add_convertor_item( - "Found {} incompatible subset{}".format( - len(self.legacy_subsets), - "s" if len(self.legacy_subsets) > 1 else "" + "Found {} incompatible product{}".format( + len(self.legacy_instances), + "s" if len(self.legacy_instances) > 1 else "" ) ) def convert(self): - """Convert all legacy subsets to current. + """Convert all legacy products to current. It is enough to add `creator_identifier` and `instance_node`. """ - if not self.legacy_subsets: + if not self.legacy_instances: return - for family, instance_nodes in self.legacy_subsets.items(): - if family in self.family_to_id: + for product_type, instance_nodes in self.legacy_instances.items(): + if product_type in self.product_type_to_id: for instance_node in instance_nodes: - creator_identifier = self.family_to_id[family] + creator_identifier = self.product_type_to_id[product_type] self.log.info( "Converting {} to {}".format(instance_node.name, creator_identifier) diff --git a/client/ayon_core/hosts/blender/plugins/create/create_action.py b/client/ayon_core/hosts/blender/plugins/create/create_action.py index 2331daf7b7..070b9843c3 100644 --- a/client/ayon_core/hosts/blender/plugins/create/create_action.py +++ b/client/ayon_core/hosts/blender/plugins/create/create_action.py @@ -10,19 +10,21 @@ class CreateAction(plugin.BaseCreator): identifier = "io.openpype.creators.blender.action" label = "Action" - family = "action" + product_type = "action" icon = "male" def create( - self, subset_name: str, instance_data: dict, pre_create_data: dict + self, product_name: str, instance_data: dict, pre_create_data: dict ): # Run parent create method collection = super().create( - subset_name, instance_data, pre_create_data + product_name, instance_data, pre_create_data ) # Get instance name - name = plugin.prepare_scene_name(instance_data["asset"], subset_name) + name = plugin.prepare_scene_name( + instance_data["folderPath"], product_name + ) if pre_create_data.get("use_selection"): for obj in lib.get_selection(): diff --git a/client/ayon_core/hosts/blender/plugins/create/create_animation.py b/client/ayon_core/hosts/blender/plugins/create/create_animation.py index 8671d3bfdb..b806a5a7ca 100644 --- a/client/ayon_core/hosts/blender/plugins/create/create_animation.py +++ b/client/ayon_core/hosts/blender/plugins/create/create_animation.py @@ -8,15 +8,15 @@ class CreateAnimation(plugin.BaseCreator): identifier = "io.openpype.creators.blender.animation" label = "Animation" - family = "animation" + product_type = "animation" icon = "male" def create( - self, subset_name: str, instance_data: dict, pre_create_data: dict + self, product_name: str, instance_data: dict, pre_create_data: dict ): # Run parent create method collection = super().create( - subset_name, instance_data, pre_create_data + product_name, instance_data, pre_create_data ) if pre_create_data.get("use_selection"): diff --git a/client/ayon_core/hosts/blender/plugins/create/create_blendScene.py b/client/ayon_core/hosts/blender/plugins/create/create_blendScene.py index 6afb2ca6a0..51250bf18b 100644 --- a/client/ayon_core/hosts/blender/plugins/create/create_blendScene.py +++ 
b/client/ayon_core/hosts/blender/plugins/create/create_blendScene.py @@ -10,16 +10,16 @@ class CreateBlendScene(plugin.BaseCreator): identifier = "io.openpype.creators.blender.blendscene" label = "Blender Scene" - family = "blendScene" + product_type = "blendScene" icon = "cubes" maintain_selection = False def create( - self, subset_name: str, instance_data: dict, pre_create_data: dict + self, product_name: str, instance_data: dict, pre_create_data: dict ): - instance_node = super().create(subset_name, + instance_node = super().create(product_name, instance_data, pre_create_data) diff --git a/client/ayon_core/hosts/blender/plugins/create/create_camera.py b/client/ayon_core/hosts/blender/plugins/create/create_camera.py index c63a294cf9..cd82bec236 100644 --- a/client/ayon_core/hosts/blender/plugins/create/create_camera.py +++ b/client/ayon_core/hosts/blender/plugins/create/create_camera.py @@ -11,16 +11,16 @@ class CreateCamera(plugin.BaseCreator): identifier = "io.openpype.creators.blender.camera" label = "Camera" - family = "camera" + product_type = "camera" icon = "video-camera" create_as_asset_group = True def create( - self, subset_name: str, instance_data: dict, pre_create_data: dict + self, product_name: str, instance_data: dict, pre_create_data: dict ): - asset_group = super().create(subset_name, + asset_group = super().create(product_name, instance_data, pre_create_data) @@ -30,8 +30,8 @@ class CreateCamera(plugin.BaseCreator): obj.parent = asset_group else: plugin.deselect_all() - camera = bpy.data.cameras.new(subset_name) - camera_obj = bpy.data.objects.new(subset_name, camera) + camera = bpy.data.cameras.new(product_name) + camera_obj = bpy.data.objects.new(product_name, camera) instances = bpy.data.collections.get(AVALON_INSTANCES) instances.objects.link(camera_obj) diff --git a/client/ayon_core/hosts/blender/plugins/create/create_layout.py b/client/ayon_core/hosts/blender/plugins/create/create_layout.py index 3da3916aef..289c39fc38 100644 --- a/client/ayon_core/hosts/blender/plugins/create/create_layout.py +++ b/client/ayon_core/hosts/blender/plugins/create/create_layout.py @@ -10,16 +10,16 @@ class CreateLayout(plugin.BaseCreator): identifier = "io.openpype.creators.blender.layout" label = "Layout" - family = "layout" + product_type = "layout" icon = "cubes" create_as_asset_group = True def create( - self, subset_name: str, instance_data: dict, pre_create_data: dict + self, product_name: str, instance_data: dict, pre_create_data: dict ): - asset_group = super().create(subset_name, + asset_group = super().create(product_name, instance_data, pre_create_data) diff --git a/client/ayon_core/hosts/blender/plugins/create/create_model.py b/client/ayon_core/hosts/blender/plugins/create/create_model.py index b6d89c8862..837ba47417 100644 --- a/client/ayon_core/hosts/blender/plugins/create/create_model.py +++ b/client/ayon_core/hosts/blender/plugins/create/create_model.py @@ -10,15 +10,15 @@ class CreateModel(plugin.BaseCreator): identifier = "io.openpype.creators.blender.model" label = "Model" - family = "model" + product_type = "model" icon = "cube" create_as_asset_group = True def create( - self, subset_name: str, instance_data: dict, pre_create_data: dict + self, product_name: str, instance_data: dict, pre_create_data: dict ): - asset_group = super().create(subset_name, + asset_group = super().create(product_name, instance_data, pre_create_data) diff --git a/client/ayon_core/hosts/blender/plugins/create/create_pointcache.py 
b/client/ayon_core/hosts/blender/plugins/create/create_pointcache.py index 20ef3fbde4..0aa2d62c17 100644 --- a/client/ayon_core/hosts/blender/plugins/create/create_pointcache.py +++ b/client/ayon_core/hosts/blender/plugins/create/create_pointcache.py @@ -8,15 +8,15 @@ class CreatePointcache(plugin.BaseCreator): identifier = "io.openpype.creators.blender.pointcache" label = "Point Cache" - family = "pointcache" + product_type = "pointcache" icon = "gears" def create( - self, subset_name: str, instance_data: dict, pre_create_data: dict + self, product_name: str, instance_data: dict, pre_create_data: dict ): # Run parent create method collection = super().create( - subset_name, instance_data, pre_create_data + product_name, instance_data, pre_create_data ) if pre_create_data.get("use_selection"): diff --git a/client/ayon_core/hosts/blender/plugins/create/create_render.py b/client/ayon_core/hosts/blender/plugins/create/create_render.py index 82337a47f2..bf3d1e62b3 100644 --- a/client/ayon_core/hosts/blender/plugins/create/create_render.py +++ b/client/ayon_core/hosts/blender/plugins/create/create_render.py @@ -1,8 +1,10 @@ """Create render.""" import bpy +from ayon_core.lib import version_up from ayon_core.hosts.blender.api import plugin from ayon_core.hosts.blender.api.render_lib import prepare_rendering +from ayon_core.hosts.blender.api.workio import save_file class CreateRenderlayer(plugin.BaseCreator): @@ -10,16 +12,16 @@ class CreateRenderlayer(plugin.BaseCreator): identifier = "io.openpype.creators.blender.render" label = "Render" - family = "render" + product_type = "render" icon = "eye" def create( - self, subset_name: str, instance_data: dict, pre_create_data: dict + self, product_name: str, instance_data: dict, pre_create_data: dict ): try: # Run parent create method collection = super().create( - subset_name, instance_data, pre_create_data + product_name, instance_data, pre_create_data ) prepare_rendering(collection) @@ -37,6 +39,7 @@ class CreateRenderlayer(plugin.BaseCreator): # settings. Even the validator to check that the file is saved will # detect the file as saved, even if it isn't. The only solution for # now it is to force the file to be saved. 
- bpy.ops.wm.save_as_mainfile(filepath=bpy.data.filepath) + filepath = version_up(bpy.data.filepath) + save_file(filepath, copy=False) return collection diff --git a/client/ayon_core/hosts/blender/plugins/create/create_review.py b/client/ayon_core/hosts/blender/plugins/create/create_review.py index cf94819b3e..b478ec59f4 100644 --- a/client/ayon_core/hosts/blender/plugins/create/create_review.py +++ b/client/ayon_core/hosts/blender/plugins/create/create_review.py @@ -8,15 +8,15 @@ class CreateReview(plugin.BaseCreator): identifier = "io.openpype.creators.blender.review" label = "Review" - family = "review" + product_type = "review" icon = "video-camera" def create( - self, subset_name: str, instance_data: dict, pre_create_data: dict + self, product_name: str, instance_data: dict, pre_create_data: dict ): # Run parent create method collection = super().create( - subset_name, instance_data, pre_create_data + product_name, instance_data, pre_create_data ) if pre_create_data.get("use_selection"): diff --git a/client/ayon_core/hosts/blender/plugins/create/create_rig.py b/client/ayon_core/hosts/blender/plugins/create/create_rig.py index 07b33fe4ba..10b6b20d36 100644 --- a/client/ayon_core/hosts/blender/plugins/create/create_rig.py +++ b/client/ayon_core/hosts/blender/plugins/create/create_rig.py @@ -10,15 +10,15 @@ class CreateRig(plugin.BaseCreator): identifier = "io.openpype.creators.blender.rig" label = "Rig" - family = "rig" + product_type = "rig" icon = "wheelchair" create_as_asset_group = True def create( - self, subset_name: str, instance_data: dict, pre_create_data: dict + self, product_name: str, instance_data: dict, pre_create_data: dict ): - asset_group = super().create(subset_name, + asset_group = super().create(product_name, instance_data, pre_create_data) diff --git a/client/ayon_core/hosts/blender/plugins/create/create_workfile.py b/client/ayon_core/hosts/blender/plugins/create/create_workfile.py index 09947f85d1..ead3ed7749 100644 --- a/client/ayon_core/hosts/blender/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/blender/plugins/create/create_workfile.py @@ -19,7 +19,7 @@ class CreateWorkfile(BaseCreator, AutoCreator): """ identifier = "io.openpype.creators.blender.workfile" label = "Workfile" - family = "workfile" + product_type = "workfile" icon = "fa5.file" def create(self): @@ -43,8 +43,12 @@ class CreateWorkfile(BaseCreator, AutoCreator): if not workfile_instance: asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - task_name, task_name, asset_doc, project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + task_name, + host_name, ) data = { "folderPath": asset_name, @@ -53,17 +57,17 @@ class CreateWorkfile(BaseCreator, AutoCreator): } data.update( self.get_dynamic_data( - task_name, - task_name, - asset_doc, project_name, + asset_doc, + task_name, + task_name, host_name, workfile_instance, ) ) self.log.info("Auto-creating workfile instance...") workfile_instance = CreatedInstance( - self.family, subset_name, data, self + self.product_type, product_name, data, self ) self._add_instance_to_context(workfile_instance) @@ -73,13 +77,17 @@ class CreateWorkfile(BaseCreator, AutoCreator): ): # Update instance context if it's different asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - task_name, task_name, asset_doc, project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + 
self.default_variant, + host_name, ) workfile_instance["folderPath"] = asset_name workfile_instance["task"] = task_name - workfile_instance["subset"] = subset_name + workfile_instance["productName"] = product_name instance_node = bpy.data.collections.get(AVALON_CONTAINERS) if not instance_node: diff --git a/client/ayon_core/hosts/blender/plugins/load/import_workfile.py b/client/ayon_core/hosts/blender/plugins/load/import_workfile.py index 061c6108ad..5a801da848 100644 --- a/client/ayon_core/hosts/blender/plugins/load/import_workfile.py +++ b/client/ayon_core/hosts/blender/plugins/load/import_workfile.py @@ -4,10 +4,10 @@ from ayon_core.hosts.blender.api import plugin def append_workfile(context, fname, do_import): - asset = context['asset']['name'] - subset = context['subset']['name'] + folder_name = context['asset']['name'] + product_name = context['subset']['name'] - group_name = plugin.prepare_scene_name(asset, subset) + group_name = plugin.prepare_scene_name(folder_name, product_name) # We need to preserve the original names of the scenes, otherwise, # if there are duplicate names in the current workfile, the imported diff --git a/client/ayon_core/hosts/blender/plugins/load/load_abc.py b/client/ayon_core/hosts/blender/plugins/load/load_abc.py index b25f4eb277..3fc879f9c8 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_abc.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_abc.py @@ -134,13 +134,15 @@ class CacheModelLoader(plugin.AssetLoader): """ libpath = self.filepath_from_context(context) - asset = context["asset"]["name"] - subset = context["subset"]["name"] + folder_name = context["asset"]["name"] + product_name = context["subset"]["name"] - asset_name = plugin.prepare_scene_name(asset, subset) - unique_number = plugin.get_unique_number(asset, subset) - group_name = plugin.prepare_scene_name(asset, subset, unique_number) - namespace = namespace or f"{asset}_{unique_number}" + asset_name = plugin.prepare_scene_name(folder_name, product_name) + unique_number = plugin.get_unique_number(folder_name, product_name) + group_name = plugin.prepare_scene_name( + folder_name, product_name, unique_number + ) + namespace = namespace or f"{folder_name}_{unique_number}" containers = bpy.data.collections.get(AVALON_CONTAINERS) if not containers: @@ -159,6 +161,7 @@ class CacheModelLoader(plugin.AssetLoader): self._link_objects(objects, asset_group, containers, asset_group) + product_type = context["subset"]["data"]["family"] asset_group[AVALON_PROPERTY] = { "schema": "openpype:container-2.0", "id": AVALON_CONTAINER_ID, @@ -169,14 +172,14 @@ class CacheModelLoader(plugin.AssetLoader): "libpath": libpath, "asset_name": asset_name, "parent": str(context["representation"]["parent"]), - "family": context["representation"]["context"]["family"], + "productType": product_type, "objectName": group_name } self[:] = objects return objects - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -188,15 +191,16 @@ class CacheModelLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! 
""" + repre_doc = context["representation"] object_name = container["objectName"] asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_doc)) extension = libpath.suffix.lower() self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_doc, indent=2), ) assert asset_group, ( @@ -241,7 +245,7 @@ class CacheModelLoader(plugin.AssetLoader): asset_group.matrix_basis = mat metadata["libpath"] = str(libpath) - metadata["representation"] = str(representation["_id"]) + metadata["representation"] = str(repre_doc["_id"]) def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. diff --git a/client/ayon_core/hosts/blender/plugins/load/load_action.py b/client/ayon_core/hosts/blender/plugins/load/load_action.py index 5c8ba0df44..df7ffe439d 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_action.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_action.py @@ -44,11 +44,11 @@ class BlendActionLoader(plugin.AssetLoader): """ libpath = self.filepath_from_context(context) - asset = context["asset"]["name"] - subset = context["subset"]["name"] - lib_container = plugin.prepare_scene_name(asset, subset) + folder_name = context["asset"]["name"] + product_name = context["subset"]["name"] + lib_container = plugin.prepare_scene_name(folder_name, product_name) container_name = plugin.prepare_scene_name( - asset, subset, namespace + folder_name, product_name, namespace ) container = bpy.data.collections.new(lib_container) @@ -114,7 +114,7 @@ class BlendActionLoader(plugin.AssetLoader): self[:] = nodes return nodes - def update(self, container: Dict, representation: Dict): + def update(self, container: Dict, context: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -126,18 +126,18 @@ class BlendActionLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! 
""" - + repre_doc = context["representation"] collection = bpy.data.collections.get( container["objectName"] ) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_doc)) extension = libpath.suffix.lower() logger.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_doc, indent=2), ) assert collection, ( @@ -241,7 +241,7 @@ class BlendActionLoader(plugin.AssetLoader): # Save the list of objects in the metadata container collection_metadata["objects"] = objects_list collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = str(representation["_id"]) + collection_metadata["representation"] = str(repre_doc["_id"]) bpy.ops.object.select_all(action='DESELECT') diff --git a/client/ayon_core/hosts/blender/plugins/load/load_audio.py b/client/ayon_core/hosts/blender/plugins/load/load_audio.py index 007889f6f6..85d5277d40 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_audio.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_audio.py @@ -39,13 +39,15 @@ class AudioLoader(plugin.AssetLoader): options: Additional settings dictionary """ libpath = self.filepath_from_context(context) - asset = context["asset"]["name"] - subset = context["subset"]["name"] + folder_name = context["asset"]["name"] + product_name = context["subset"]["name"] - asset_name = plugin.prepare_scene_name(asset, subset) - unique_number = plugin.get_unique_number(asset, subset) - group_name = plugin.prepare_scene_name(asset, subset, unique_number) - namespace = namespace or f"{asset}_{unique_number}" + asset_name = plugin.prepare_scene_name(folder_name, product_name) + unique_number = plugin.get_unique_number(folder_name, product_name) + group_name = plugin.prepare_scene_name( + folder_name, product_name, unique_number + ) + namespace = namespace or f"{folder_name}_{unique_number}" avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) if not avalon_container: @@ -85,7 +87,7 @@ class AudioLoader(plugin.AssetLoader): "libpath": libpath, "asset_name": asset_name, "parent": str(context["representation"]["parent"]), - "family": context["representation"]["context"]["family"], + "productType": context["subset"]["data"]["family"], "objectName": group_name, "audio": audio } @@ -94,7 +96,7 @@ class AudioLoader(plugin.AssetLoader): self[:] = objects return [objects] - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Update an audio strip in the sequence editor. Arguments: @@ -103,14 +105,15 @@ class AudioLoader(plugin.AssetLoader): representation (openpype:representation-1.0): Representation to update, from `host.ls()`. 
""" + repre_doc = context["representation"] object_name = container["objectName"] asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_doc)) self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_doc, indent=2), ) assert asset_group, ( @@ -173,8 +176,8 @@ class AudioLoader(plugin.AssetLoader): window_manager.windows[-1].screen.areas[0].type = old_type metadata["libpath"] = str(libpath) - metadata["representation"] = str(representation["_id"]) - metadata["parent"] = str(representation["parent"]) + metadata["representation"] = str(repre_doc["_id"]) + metadata["parent"] = str(repre_doc["parent"]) metadata["audio"] = new_audio def exec_remove(self, container: Dict) -> bool: diff --git a/client/ayon_core/hosts/blender/plugins/load/load_blend.py b/client/ayon_core/hosts/blender/plugins/load/load_blend.py index c9862f9841..fdae9c1b6b 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_blend.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_blend.py @@ -127,20 +127,22 @@ class BlendLoader(plugin.AssetLoader): options: Additional settings dictionary """ libpath = self.filepath_from_context(context) - asset = context["asset"]["name"] - subset = context["subset"]["name"] + folder_name = context["asset"]["name"] + product_name = context["subset"]["name"] try: - family = context["representation"]["context"]["family"] + product_type = context["subset"]["data"]["family"] except ValueError: - family = "model" + product_type = "model" representation = str(context["representation"]["_id"]) - asset_name = plugin.prepare_scene_name(asset, subset) - unique_number = plugin.get_unique_number(asset, subset) - group_name = plugin.prepare_scene_name(asset, subset, unique_number) - namespace = namespace or f"{asset}_{unique_number}" + asset_name = plugin.prepare_scene_name(folder_name, product_name) + unique_number = plugin.get_unique_number(folder_name, product_name) + group_name = plugin.prepare_scene_name( + folder_name, product_name, unique_number + ) + namespace = namespace or f"{folder_name}_{unique_number}" avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) if not avalon_container: @@ -149,8 +151,8 @@ class BlendLoader(plugin.AssetLoader): container, members = self._process_data(libpath, group_name) - if family == "layout": - self._post_process_layout(container, asset, representation) + if product_type == "layout": + self._post_process_layout(container, folder_name, representation) avalon_container.objects.link(container) @@ -164,7 +166,7 @@ class BlendLoader(plugin.AssetLoader): "libpath": libpath, "asset_name": asset_name, "parent": str(context["representation"]["parent"]), - "family": context["representation"]["context"]["family"], + "productType": context["subset"]["data"]["family"], "objectName": group_name, "members": members, } @@ -179,13 +181,14 @@ class BlendLoader(plugin.AssetLoader): self[:] = objects return objects - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """ Update the loaded asset. 
""" + repre_doc = context["representation"] group_name = container["objectName"] asset_group = bpy.data.objects.get(group_name) - libpath = Path(get_representation_path(representation)).as_posix() + libpath = Path(get_representation_path(repre_doc)).as_posix() assert asset_group, ( f"The asset is not loaded: {container['objectName']}" @@ -232,8 +235,8 @@ class BlendLoader(plugin.AssetLoader): new_data = { "libpath": libpath, - "representation": str(representation["_id"]), - "parent": str(representation["parent"]), + "representation": str(repre_doc["_id"]), + "parent": str(repre_doc["parent"]), "members": members, } diff --git a/client/ayon_core/hosts/blender/plugins/load/load_blendscene.py b/client/ayon_core/hosts/blender/plugins/load/load_blendscene.py index 248bf5a901..52ecdd6a0a 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_blendscene.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_blendscene.py @@ -34,7 +34,7 @@ class BlendSceneLoader(plugin.AssetLoader): return None - def _process_data(self, libpath, group_name, family): + def _process_data(self, libpath, group_name, product_type): # Append all the data from the .blend file with bpy.data.libraries.load( libpath, link=False, relative=False @@ -82,25 +82,29 @@ class BlendSceneLoader(plugin.AssetLoader): options: Additional settings dictionary """ libpath = self.filepath_from_context(context) - asset = context["asset"]["name"] - subset = context["subset"]["name"] + folder_name = context["asset"]["name"] + product_name = context["subset"]["name"] try: - family = context["representation"]["context"]["family"] + product_type = context["subset"]["data"]["family"] except ValueError: - family = "model" + product_type = "model" - asset_name = plugin.prepare_scene_name(asset, subset) - unique_number = plugin.get_unique_number(asset, subset) - group_name = plugin.prepare_scene_name(asset, subset, unique_number) - namespace = namespace or f"{asset}_{unique_number}" + asset_name = plugin.prepare_scene_name(folder_name, product_name) + unique_number = plugin.get_unique_number(folder_name, product_name) + group_name = plugin.prepare_scene_name( + folder_name, product_name, unique_number + ) + namespace = namespace or f"{folder_name}_{unique_number}" avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) if not avalon_container: avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) bpy.context.scene.collection.children.link(avalon_container) - container, members = self._process_data(libpath, group_name, family) + container, members = self._process_data( + libpath, group_name, product_type + ) avalon_container.children.link(container) @@ -114,7 +118,7 @@ class BlendSceneLoader(plugin.AssetLoader): "libpath": libpath, "asset_name": asset_name, "parent": str(context["representation"]["parent"]), - "family": context["representation"]["context"]["family"], + "productType": context["subset"]["data"]["family"], "objectName": group_name, "members": members, } @@ -129,13 +133,14 @@ class BlendSceneLoader(plugin.AssetLoader): self[:] = objects return objects - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """ Update the loaded asset. 
""" + repre_doc = context["representation"] group_name = container["objectName"] asset_group = bpy.data.collections.get(group_name) - libpath = Path(get_representation_path(representation)).as_posix() + libpath = Path(get_representation_path(repre_doc)).as_posix() assert asset_group, ( f"The asset is not loaded: {container['objectName']}" @@ -167,8 +172,12 @@ class BlendSceneLoader(plugin.AssetLoader): self.exec_remove(container) - family = container["family"] - asset_group, members = self._process_data(libpath, group_name, family) + product_type = container.get("productType") + if product_type is None: + product_type = container["family"] + asset_group, members = self._process_data( + libpath, group_name, product_type + ) for member in members: if member.name in collection_parents: @@ -193,8 +202,8 @@ class BlendSceneLoader(plugin.AssetLoader): new_data = { "libpath": libpath, - "representation": str(representation["_id"]), - "parent": str(representation["parent"]), + "representation": str(repre_doc["_id"]), + "parent": str(repre_doc["parent"]), "members": members, } diff --git a/client/ayon_core/hosts/blender/plugins/load/load_camera_abc.py b/client/ayon_core/hosts/blender/plugins/load/load_camera_abc.py index 8f0bd6741d..da90f0b1ab 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_camera_abc.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_camera_abc.py @@ -84,13 +84,15 @@ class AbcCameraLoader(plugin.AssetLoader): libpath = self.filepath_from_context(context) - asset = context["asset"]["name"] - subset = context["subset"]["name"] + folder_name = context["asset"]["name"] + product_name = context["subset"]["name"] - asset_name = plugin.prepare_scene_name(asset, subset) - unique_number = plugin.get_unique_number(asset, subset) - group_name = plugin.prepare_scene_name(asset, subset, unique_number) - namespace = namespace or f"{asset}_{unique_number}" + asset_name = plugin.prepare_scene_name(folder_name, product_name) + unique_number = plugin.get_unique_number(folder_name, product_name) + group_name = plugin.prepare_scene_name( + folder_name, product_name, unique_number + ) + namespace = namespace or f"{folder_name}_{unique_number}" avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) if not avalon_container: @@ -121,14 +123,14 @@ class AbcCameraLoader(plugin.AssetLoader): "libpath": libpath, "asset_name": asset_name, "parent": str(context["representation"]["parent"]), - "family": context["representation"]["context"]["family"], + "productType": context["subset"]["data"]["family"], "objectName": group_name, } self[:] = objects return objects - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -140,15 +142,16 @@ class AbcCameraLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! 
""" + repre_doc = context["representation"] object_name = container["objectName"] asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_doc)) extension = libpath.suffix.lower() self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_doc, indent=2), ) assert asset_group, ( @@ -183,7 +186,7 @@ class AbcCameraLoader(plugin.AssetLoader): asset_group.matrix_basis = mat metadata["libpath"] = str(libpath) - metadata["representation"] = str(representation["_id"]) + metadata["representation"] = str(repre_doc["_id"]) def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. diff --git a/client/ayon_core/hosts/blender/plugins/load/load_camera_fbx.py b/client/ayon_core/hosts/blender/plugins/load/load_camera_fbx.py index 7642871dc7..2024356e70 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_camera_fbx.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_camera_fbx.py @@ -87,13 +87,15 @@ class FbxCameraLoader(plugin.AssetLoader): options: Additional settings dictionary """ libpath = self.filepath_from_context(context) - asset = context["asset"]["name"] - subset = context["subset"]["name"] + folder_name = context["asset"]["name"] + product_name = context["subset"]["name"] - asset_name = plugin.prepare_scene_name(asset, subset) - unique_number = plugin.get_unique_number(asset, subset) - group_name = plugin.prepare_scene_name(asset, subset, unique_number) - namespace = namespace or f"{asset}_{unique_number}" + asset_name = plugin.prepare_scene_name(folder_name, product_name) + unique_number = plugin.get_unique_number(folder_name, product_name) + group_name = plugin.prepare_scene_name( + folder_name, product_name, unique_number + ) + namespace = namespace or f"{folder_name}_{unique_number}" avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) if not avalon_container: @@ -124,14 +126,14 @@ class FbxCameraLoader(plugin.AssetLoader): "libpath": libpath, "asset_name": asset_name, "parent": str(context["representation"]["parent"]), - "family": context["representation"]["context"]["family"], + "productType": context["subset"]["data"]["family"], "objectName": group_name } self[:] = objects return objects - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -143,15 +145,16 @@ class FbxCameraLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! """ + repre_doc = context["representation"] object_name = container["objectName"] asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_doc)) extension = libpath.suffix.lower() self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_doc, indent=2), ) assert asset_group, ( @@ -193,7 +196,7 @@ class FbxCameraLoader(plugin.AssetLoader): asset_group.matrix_basis = mat metadata["libpath"] = str(libpath) - metadata["representation"] = str(representation["_id"]) + metadata["representation"] = str(repre_doc["_id"]) def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. 
diff --git a/client/ayon_core/hosts/blender/plugins/load/load_fbx.py b/client/ayon_core/hosts/blender/plugins/load/load_fbx.py index 03993c9f5e..7b4acfed9a 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_fbx.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_fbx.py @@ -131,13 +131,15 @@ class FbxModelLoader(plugin.AssetLoader): options: Additional settings dictionary """ libpath = self.filepath_from_context(context) - asset = context["asset"]["name"] - subset = context["subset"]["name"] + folder_name = context["asset"]["name"] + product_name = context["subset"]["name"] - asset_name = plugin.prepare_scene_name(asset, subset) - unique_number = plugin.get_unique_number(asset, subset) - group_name = plugin.prepare_scene_name(asset, subset, unique_number) - namespace = namespace or f"{asset}_{unique_number}" + asset_name = plugin.prepare_scene_name(folder_name, product_name) + unique_number = plugin.get_unique_number(folder_name, product_name) + group_name = plugin.prepare_scene_name( + folder_name, product_name, unique_number + ) + namespace = namespace or f"{folder_name}_{unique_number}" avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) if not avalon_container: @@ -168,14 +170,14 @@ class FbxModelLoader(plugin.AssetLoader): "libpath": libpath, "asset_name": asset_name, "parent": str(context["representation"]["parent"]), - "family": context["representation"]["context"]["family"], + "productType": context["subset"]["data"]["family"], "objectName": group_name } self[:] = objects return objects - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -187,15 +189,16 @@ class FbxModelLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! """ + repre_doc = context["representation"] object_name = container["objectName"] asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_doc)) extension = libpath.suffix.lower() self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_doc, indent=2), ) assert asset_group, ( @@ -248,7 +251,7 @@ class FbxModelLoader(plugin.AssetLoader): asset_group.matrix_basis = mat metadata["libpath"] = str(libpath) - metadata["representation"] = str(representation["_id"]) + metadata["representation"] = str(repre_doc["_id"]) def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. 
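Loaders that read the product type back from previously imprinted metadata keep a fallback to the legacy family key, so containers created before this rename still resolve correctly (see the container.get("productType") branch in the BlendSceneLoader update above and the element/obj_meta lookups in the layout loader below). A minimal sketch of that lookup with illustrative dicts:

def resolve_product_type(metadata):
    # Prefer the new key, fall back to the legacy one written by old scenes.
    product_type = metadata.get("productType")
    if product_type is None:
        product_type = metadata.get("family")
    return product_type

assert resolve_product_type({"productType": "layout"}) == "layout"  # new container
assert resolve_product_type({"family": "rig"}) == "rig"             # legacy container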
diff --git a/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py b/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py index f48862a803..84793775e5 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py @@ -50,11 +50,11 @@ class JsonLayoutLoader(plugin.AssetLoader): if anim_collection: bpy.data.collections.remove(anim_collection) - def _get_loader(self, loaders, family): + def _get_loader(self, loaders, product_type): name = "" - if family == 'rig': + if product_type == 'rig': name = "BlendRigLoader" - elif family == 'model': + elif product_type == 'model': name = "BlendModelLoader" if name == "": @@ -76,10 +76,12 @@ class JsonLayoutLoader(plugin.AssetLoader): for element in data: reference = element.get('reference') - family = element.get('family') + product_type = element.get("product_type") + if product_type is None: + product_type = element.get("family") loaders = loaders_from_representation(all_loaders, reference) - loader = self._get_loader(loaders, family) + loader = self._get_loader(loaders, product_type) if not loader: continue @@ -95,7 +97,7 @@ class JsonLayoutLoader(plugin.AssetLoader): 'parent': asset_group, 'transform': element.get('transform'), 'action': action, - 'create_animation': True if family == 'rig' else False, + 'create_animation': True if product_type == 'rig' else False, 'animation_asset': asset } @@ -127,7 +129,7 @@ class JsonLayoutLoader(plugin.AssetLoader): # legacy_create( # creator_plugin, # name="camera", - # # name=f"{unique_number}_{subset}_animation", + # # name=f"{unique_number}_{product[name]}_animation", # asset=asset, # options={"useSelection": False} # # data={"dependencies": str(context["representation"]["_id"])} @@ -146,13 +148,15 @@ class JsonLayoutLoader(plugin.AssetLoader): options: Additional settings dictionary """ libpath = self.filepath_from_context(context) - asset = context["asset"]["name"] - subset = context["subset"]["name"] + folder_name = context["asset"]["name"] + product_name = context["subset"]["name"] - asset_name = plugin.prepare_scene_name(asset, subset) - unique_number = plugin.get_unique_number(asset, subset) - group_name = plugin.prepare_scene_name(asset, subset, unique_number) - namespace = namespace or f"{asset}_{unique_number}" + asset_name = plugin.prepare_scene_name(folder_name, product_name) + unique_number = plugin.get_unique_number(folder_name, product_name) + group_name = plugin.prepare_scene_name( + folder_name, product_name, unique_number + ) + namespace = namespace or f"{folder_name}_{unique_number}" avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) if not avalon_container: @@ -177,14 +181,14 @@ class JsonLayoutLoader(plugin.AssetLoader): "libpath": libpath, "asset_name": asset_name, "parent": str(context["representation"]["parent"]), - "family": context["representation"]["context"]["family"], + "productType": context["subset"]["data"]["family"], "objectName": group_name } self[:] = asset_group.children return asset_group.children - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -193,15 +197,16 @@ class JsonLayoutLoader(plugin.AssetLoader): will not be removed, only unlinked. Normally this should not be the case though. 
""" + repre_doc = context["representation"] object_name = container["objectName"] asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_doc)) extension = libpath.suffix.lower() self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_doc, indent=2), ) assert asset_group, ( @@ -239,7 +244,10 @@ class JsonLayoutLoader(plugin.AssetLoader): for obj in asset_group.children: obj_meta = obj.get(AVALON_PROPERTY) - if obj_meta.get('family') == 'rig': + product_type = obj_meta.get("productType") + if product_type is None: + product_type = obj_meta.get("family") + if product_type == "rig": rig = None for child in obj.children: if child.type == 'ARMATURE': @@ -262,7 +270,7 @@ class JsonLayoutLoader(plugin.AssetLoader): asset_group.matrix_basis = mat metadata["libpath"] = str(libpath) - metadata["representation"] = str(representation["_id"]) + metadata["representation"] = str(repre_doc["_id"]) def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. diff --git a/client/ayon_core/hosts/blender/plugins/load/load_look.py b/client/ayon_core/hosts/blender/plugins/load/load_look.py index f9ebf98912..59896e0ae0 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_look.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_look.py @@ -93,18 +93,18 @@ class BlendLookLoader(plugin.AssetLoader): """ libpath = self.filepath_from_context(context) - asset = context["asset"]["name"] - subset = context["subset"]["name"] + folder_name = context["asset"]["name"] + product_name = context["subset"]["name"] lib_container = plugin.prepare_scene_name( - asset, subset + folder_name, product_name ) unique_number = plugin.get_unique_number( - asset, subset + folder_name, product_name ) - namespace = namespace or f"{asset}_{unique_number}" + namespace = namespace or f"{folder_name}_{unique_number}" container_name = plugin.prepare_scene_name( - asset, subset, unique_number + folder_name, product_name, unique_number ) container = bpy.data.collections.new(lib_container) @@ -131,22 +131,23 @@ class BlendLookLoader(plugin.AssetLoader): metadata["materials"] = materials metadata["parent"] = str(context["representation"]["parent"]) - metadata["family"] = context["representation"]["context"]["family"] + metadata["product_type"] = context["subset"]["data"]["family"] nodes = list(container.objects) nodes.append(container) self[:] = nodes return nodes - def update(self, container: Dict, representation: Dict): + def update(self, container: Dict, context: Dict): collection = bpy.data.collections.get(container["objectName"]) - libpath = Path(get_representation_path(representation)) + repre_doc = context["representation"] + libpath = Path(get_representation_path(repre_doc)) extension = libpath.suffix.lower() self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_doc, indent=2), ) assert collection, ( @@ -201,7 +202,7 @@ class BlendLookLoader(plugin.AssetLoader): collection_metadata["objects"] = objects collection_metadata["materials"] = materials collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = str(representation["_id"]) + collection_metadata["representation"] = str(repre_doc["_id"]) def remove(self, container: Dict) -> bool: collection = bpy.data.collections.get(container["objectName"]) diff 
--git a/client/ayon_core/hosts/blender/plugins/publish/collect_instance.py b/client/ayon_core/hosts/blender/plugins/publish/collect_instance.py index f9338cd30a..d47c69a270 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/collect_instance.py +++ b/client/ayon_core/hosts/blender/plugins/publish/collect_instance.py @@ -25,7 +25,7 @@ class CollectBlenderInstanceData(pyblish.api.InstancePlugin): members.extend(instance_node.children) # Special case for animation instances, include armatures - if instance.data["family"] == "animation": + if instance.data["productType"] == "animation": for obj in instance_node.objects: if obj.type == 'EMPTY' and obj.get(AVALON_PROPERTY): members.extend( diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_abc.py b/client/ayon_core/hosts/blender/plugins/publish/extract_abc.py index 2f89426e56..cf753637ea 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_abc.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_abc.py @@ -19,9 +19,9 @@ class ExtractABC(publish.Extractor, publish.OptionalPyblishPluginMixin): # Define extract output file path stagingdir = self.staging_dir(instance) - asset_name = instance.data["assetEntity"]["name"] - subset = instance.data["subset"] - instance_name = f"{asset_name}_{subset}" + folder_name = instance.data["assetEntity"]["name"] + product_name = instance.data["productName"] + instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.abc" filepath = os.path.join(stagingdir, filename) diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_abc_animation.py b/client/ayon_core/hosts/blender/plugins/publish/extract_abc_animation.py index 41ad2a99b8..0086dccd67 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_abc_animation.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_abc_animation.py @@ -23,9 +23,9 @@ class ExtractAnimationABC( # Define extract output file path stagingdir = self.staging_dir(instance) - asset_name = instance.data["assetEntity"]["name"] - subset = instance.data["subset"] - instance_name = f"{asset_name}_{subset}" + folder_name = instance.data["assetEntity"]["name"] + product_name = instance.data["productName"] + instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.abc" filepath = os.path.join(stagingdir, filename) diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_blend.py b/client/ayon_core/hosts/blender/plugins/publish/extract_blend.py index 00e4074f55..dd2e33df80 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_blend.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_blend.py @@ -20,9 +20,9 @@ class ExtractBlend(publish.Extractor, publish.OptionalPyblishPluginMixin): # Define extract output file path stagingdir = self.staging_dir(instance) - asset_name = instance.data["assetEntity"]["name"] - subset = instance.data["subset"] - instance_name = f"{asset_name}_{subset}" + folder_name = instance.data["assetEntity"]["name"] + product_name = instance.data["productName"] + instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.blend" filepath = os.path.join(stagingdir, filename) diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_blend_animation.py b/client/ayon_core/hosts/blender/plugins/publish/extract_blend_animation.py index 6b0d6195b6..da663b46ea 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_blend_animation.py +++ 
b/client/ayon_core/hosts/blender/plugins/publish/extract_blend_animation.py @@ -23,9 +23,9 @@ class ExtractBlendAnimation( # Define extract output file path stagingdir = self.staging_dir(instance) - asset_name = instance.data["assetEntity"]["name"] - subset = instance.data["subset"] - instance_name = f"{asset_name}_{subset}" + folder_name = instance.data["assetEntity"]["name"] + product_name = instance.data["productName"] + instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.blend" filepath = os.path.join(stagingdir, filename) diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_camera_abc.py b/client/ayon_core/hosts/blender/plugins/publish/extract_camera_abc.py index efa1faa87c..ff14d70696 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_camera_abc.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_camera_abc.py @@ -21,9 +21,9 @@ class ExtractCameraABC(publish.Extractor, publish.OptionalPyblishPluginMixin): # Define extract output file path stagingdir = self.staging_dir(instance) - asset_name = instance.data["assetEntity"]["name"] - subset = instance.data["subset"] - instance_name = f"{asset_name}_{subset}" + folder_name = instance.data["assetEntity"]["name"] + product_name = instance.data["productName"] + instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.abc" filepath = os.path.join(stagingdir, filename) diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_camera_fbx.py b/client/ayon_core/hosts/blender/plugins/publish/extract_camera_fbx.py index b2b6cd602d..03059f1e13 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_camera_fbx.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_camera_fbx.py @@ -20,9 +20,9 @@ class ExtractCamera(publish.Extractor, publish.OptionalPyblishPluginMixin): # Define extract output file path stagingdir = self.staging_dir(instance) - asset_name = instance.data["assetEntity"]["name"] - subset = instance.data["subset"] - instance_name = f"{asset_name}_{subset}" + folder_name = instance.data["assetEntity"]["name"] + product_name = instance.data["productName"] + instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.fbx" filepath = os.path.join(stagingdir, filename) diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_fbx.py b/client/ayon_core/hosts/blender/plugins/publish/extract_fbx.py index 7e8c13fea8..8fea077e7c 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_fbx.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_fbx.py @@ -21,9 +21,9 @@ class ExtractFBX(publish.Extractor, publish.OptionalPyblishPluginMixin): # Define extract output file path stagingdir = self.staging_dir(instance) - asset_name = instance.data["assetEntity"]["name"] - subset = instance.data["subset"] - instance_name = f"{asset_name}_{subset}" + folder_name = instance.data["assetEntity"]["name"] + product_name = instance.data["productName"] + instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.fbx" filepath = os.path.join(stagingdir, filename) diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_fbx_animation.py b/client/ayon_core/hosts/blender/plugins/publish/extract_fbx_animation.py index 3ad4cc3aa9..b98167c741 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_fbx_animation.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_fbx_animation.py @@ -145,9 +145,9 @@ class ExtractAnimationFBX( root.select_set(True) 
armature.select_set(True) - asset_name = instance.data["assetEntity"]["name"] - subset = instance.data["subset"] - instance_name = f"{asset_name}_{subset}" + folder_name = instance.data["assetEntity"]["name"] + product_name = instance.data["productName"] + instance_name = f"{folder_name}_{product_name}" fbx_filename = f"{instance_name}_{armature.name}.fbx" filepath = os.path.join(stagingdir, fbx_filename) diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_layout.py b/client/ayon_core/hosts/blender/plugins/publish/extract_layout.py index f868db3e74..16c0392070 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_layout.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_layout.py @@ -133,7 +133,7 @@ class ExtractLayout(publish.Extractor, publish.OptionalPyblishPluginMixin): fbx_count = 0 - project_name = instance.context.data["projectEntity"]["name"] + project_name = instance.context.data["projectName"] for asset in asset_group.children: metadata = asset.get(AVALON_PROPERTY) if not metadata: @@ -147,7 +147,9 @@ class ExtractLayout(publish.Extractor, publish.OptionalPyblishPluginMixin): continue version_id = metadata["parent"] - family = metadata["family"] + product_type = metadata.get("product_type") + if product_type is None: + product_type = metadata["family"] self.log.debug("Parent: {}".format(version_id)) # Get blend reference @@ -179,7 +181,8 @@ class ExtractLayout(publish.Extractor, publish.OptionalPyblishPluginMixin): json_element["reference_fbx"] = str(fbx_id) if abc_id: json_element["reference_abc"] = str(abc_id) - json_element["family"] = family + + json_element["product_type"] = product_type json_element["instance_name"] = asset.name json_element["asset_name"] = metadata["asset_name"] json_element["file_path"] = metadata["libpath"] @@ -215,7 +218,7 @@ class ExtractLayout(publish.Extractor, publish.OptionalPyblishPluginMixin): ] # Extract the animation as well - if family == "rig": + if product_type == "rig": f, n = self._export_animation( asset, instance, stagingdir, fbx_count) if f: @@ -225,9 +228,9 @@ class ExtractLayout(publish.Extractor, publish.OptionalPyblishPluginMixin): json_data.append(json_element) - asset_name = instance.data["assetEntity"]["name"] - subset = instance.data["subset"] - instance_name = f"{asset_name}_{subset}" + folder_name = instance.data["assetEntity"]["name"] + product_name = instance.data["productName"] + instance_name = f"{folder_name}_{product_name}" json_filename = f"{instance_name}.json" json_path = os.path.join(stagingdir, json_filename) diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_playblast.py b/client/ayon_core/hosts/blender/plugins/publish/extract_playblast.py index 83e6b26fbe..acb09d0d77 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_playblast.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_playblast.py @@ -1,9 +1,11 @@ import os +import json + import clique +import pyblish.api import bpy -import pyblish.api from ayon_core.pipeline import publish from ayon_core.hosts.blender.api import capture from ayon_core.hosts.blender.api.lib import maintained_time @@ -23,6 +25,8 @@ class ExtractPlayblast(publish.Extractor, publish.OptionalPyblishPluginMixin): optional = True order = pyblish.api.ExtractorOrder + 0.01 + presets = "{}" + def process(self, instance): if not self.is_active(instance.data): return @@ -51,16 +55,15 @@ class ExtractPlayblast(publish.Extractor, publish.OptionalPyblishPluginMixin): # get output path stagingdir = 
self.staging_dir(instance) - asset_name = instance.data["assetEntity"]["name"] - subset = instance.data["subset"] - filename = f"{asset_name}_{subset}" + folder_name = instance.data["assetEntity"]["name"] + product_name = instance.data["productName"] + filename = f"{folder_name}_{product_name}" path = os.path.join(stagingdir, filename) self.log.debug(f"Outputting images to {path}") - project_settings = instance.context.data["project_settings"]["blender"] - presets = project_settings["publish"]["ExtractPlayblast"]["presets"] + presets = json.loads(self.presets) preset = presets.get("default") preset.update({ "camera": camera, diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_thumbnail.py b/client/ayon_core/hosts/blender/plugins/publish/extract_thumbnail.py index 7b445a0113..89168fb9c9 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_thumbnail.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_thumbnail.py @@ -1,5 +1,6 @@ import os import glob +import json import pyblish.api from ayon_core.pipeline import publish @@ -21,7 +22,7 @@ class ExtractThumbnail(publish.Extractor): hosts = ["blender"] families = ["review"] order = pyblish.api.ExtractorOrder + 0.01 - presets = {} + presets = "{}" def process(self, instance): self.log.debug("Extracting capture..") @@ -31,9 +32,9 @@ class ExtractThumbnail(publish.Extractor): return stagingdir = self.staging_dir(instance) - asset_name = instance.data["assetEntity"]["name"] - subset = instance.data["subset"] - filename = f"{asset_name}_{subset}" + folder_name = instance.data["assetEntity"]["name"] + product_name = instance.data["productName"] + filename = f"{folder_name}_{product_name}" path = os.path.join(stagingdir, filename) @@ -41,10 +42,11 @@ class ExtractThumbnail(publish.Extractor): camera = instance.data.get("review_camera", "AUTO") start = instance.data.get("frameStart", bpy.context.scene.frame_start) - family = instance.data.get("family") + product_type = instance.data["productType"] isolate = instance.data("isolate", None) - preset = self.presets.get(family, {}) + presets = json.loads(self.presets) + preset = presets.get(product_type, {}) preset.update({ "camera": camera, diff --git a/client/ayon_core/hosts/blender/plugins/publish/integrate_animation.py b/client/ayon_core/hosts/blender/plugins/publish/integrate_animation.py index c461d56e7c..a10144ebf5 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/integrate_animation.py +++ b/client/ayon_core/hosts/blender/plugins/publish/integrate_animation.py @@ -28,25 +28,26 @@ class IntegrateAnimation( # Update the json file for the setdress to add the published # representations of the animations for json_dict in data: + json_product_name = json_dict["productName"] i = None for elem in instance.context: - if elem.data.get('subset') == json_dict['subset']: + if elem.data["productName"] == json_product_name: i = elem break if not i: continue rep = None - pub_repr = i.data.get('published_representations') + pub_repr = i.data["published_representations"] for elem in pub_repr: - if pub_repr.get(elem).get('representation').get('name') == "fbx": - rep = pub_repr.get(elem) + if pub_repr[elem]["representation"]["name"] == "fbx": + rep = pub_repr[elem] break if not rep: continue - obj_id = rep.get('representation').get('_id') + obj_id = rep["representation"]["_id"] if obj_id: - json_dict['_id'] = str(obj_id) + json_dict["representation_id"] = str(obj_id) with open(json_path, "w") as file: json.dump(data, fp=file, indent=2) diff --git 
a/client/ayon_core/hosts/blender/plugins/publish/validate_deadline_publish.py b/client/ayon_core/hosts/blender/plugins/publish/validate_deadline_publish.py index b9310f9da0..b37db44cd4 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/validate_deadline_publish.py +++ b/client/ayon_core/hosts/blender/plugins/publish/validate_deadline_publish.py @@ -28,15 +28,27 @@ class ValidateDeadlinePublish(pyblish.api.InstancePlugin, def process(self, instance): if not self.is_active(instance.data): return + + tree = bpy.context.scene.node_tree + output_type = "CompositorNodeOutputFile" + output_node = None + # Find the output node that includes "AYON" in the name. + # There should be only one. + for node in tree.nodes: + if node.bl_idname == output_type and "AYON" in node.name: + output_node = node + break + if not output_node: + raise PublishValidationError( + "No output node found in the compositor tree." + ) filepath = bpy.data.filepath file = os.path.basename(filepath) filename, ext = os.path.splitext(file) - if filename not in bpy.context.scene.render.filepath: + if filename not in output_node.base_path: raise PublishValidationError( - "Render output folder " - "doesn't match the blender scene name! " - "Use Repair action to " - "fix the folder file path." + "Render output folder doesn't match the blender scene name! " + "Use Repair action to fix the folder file path." ) @classmethod diff --git a/client/ayon_core/hosts/blender/plugins/publish/validate_file_saved.py b/client/ayon_core/hosts/blender/plugins/publish/validate_file_saved.py index 0b8762fed5..6a053eb47b 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/validate_file_saved.py +++ b/client/ayon_core/hosts/blender/plugins/publish/validate_file_saved.py @@ -26,6 +26,7 @@ class ValidateFileSaved(pyblish.api.ContextPlugin, hosts = ["blender"] label = "Validate File Saved" optional = False + # TODO rename to 'exclude_product_types' exclude_families = [] actions = [SaveWorkfileAction] @@ -41,8 +42,8 @@ # Do not validate workfile has unsaved changes if only instances # present of families that should be excluded - families = { - instance.data["family"] for instance in context + product_types = { + instance.data["productType"] for instance in context # Consider only enabled instances if instance.data.get("publish", True) and instance.data.get("active", True) @@ -52,7 +53,7 @@ return any(family in exclude_family for exclude_family in self.exclude_families) - if all(is_excluded(family) for family in families): + if all(is_excluded(product_type) for product_type in product_types): self.log.debug("Only excluded families found, skipping workfile " "unsaved changes validation..") return diff --git a/client/ayon_core/hosts/celaction/addon.py b/client/ayon_core/hosts/celaction/addon.py index 4573ee7e56..d00401a2e0 100644 --- a/client/ayon_core/hosts/celaction/addon.py +++ b/client/ayon_core/hosts/celaction/addon.py @@ -1,16 +1,13 @@ import os -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon CELACTION_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class CelactionAddon(OpenPypeModule, IHostAddon): +class CelactionAddon(AYONAddon, IHostAddon): name = "celaction" host_name = "celaction" - def initialize(self, module_settings): - self.enabled = True - def get_launch_hook_paths(self, app): if app.host_name != self.host_name: return [] diff --git
a/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_instances.py b/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_instances.py index d0f4c59290..4306a53bfe 100644 --- a/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_instances.py +++ b/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_instances.py @@ -22,7 +22,7 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): asset_name = get_asset_name_identifier(asset_entity) shared_instance_data = { - "asset": asset_name, + "folderPath": asset_name, "frameStart": asset_entity["data"]["frameStart"], "frameEnd": asset_entity["data"]["frameEnd"], "handleStart": asset_entity["data"]["handleStart"], @@ -46,17 +46,18 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): shared_instance_data.update(celaction_kwargs) # workfile instance - family = "workfile" - subset = family + task.capitalize() + product_type = "workfile" + product_name = product_type + task.capitalize() # Create instance - instance = context.create_instance(subset) + instance = context.create_instance(product_name) # creating instance data instance.data.update({ - "subset": subset, "label": scene_file, - "family": family, - "families": [], + "productName": product_name, + "productType": product_type, + "family": product_type, + "families": [product_type], "representations": [] }) @@ -76,17 +77,19 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): self.log.info('Publishing Celaction workfile') # render instance - subset = f"render{task}Main" - instance = context.create_instance(name=subset) + product_name = f"render{task}Main" + product_type = "render.farm" + instance = context.create_instance(name=product_name) # getting instance state instance.data["publish"] = True # add assetEntity data into instance instance.data.update({ - "label": "{} - farm".format(subset), - "family": "render.farm", - "families": [], - "subset": subset + "label": "{} - farm".format(product_name), + "productType": product_type, + "family": product_type, + "families": [product_type], + "productName": product_name }) # adding basic script data diff --git a/client/ayon_core/hosts/celaction/plugins/publish/collect_render_path.py b/client/ayon_core/hosts/celaction/plugins/publish/collect_render_path.py index f6db6c000d..abe670b691 100644 --- a/client/ayon_core/hosts/celaction/plugins/publish/collect_render_path.py +++ b/client/ayon_core/hosts/celaction/plugins/publish/collect_render_path.py @@ -19,12 +19,14 @@ class CollectRenderPath(pyblish.api.InstancePlugin): anatomy = instance.context.data["anatomy"] anatomy_data = copy.deepcopy(instance.data["anatomyData"]) padding = anatomy.templates.get("frame_padding", 4) + product_type = "render" anatomy_data.update({ "frame": f"%0{padding}d", - "family": "render", + "family": product_type, "representation": self.output_extension, "ext": self.output_extension }) + anatomy_data["product"]["type"] = product_type anatomy_filled = anatomy.format(anatomy_data) diff --git a/client/ayon_core/hosts/flame/addon.py b/client/ayon_core/hosts/flame/addon.py index e30d7cab08..f5560cde7a 100644 --- a/client/ayon_core/hosts/flame/addon.py +++ b/client/ayon_core/hosts/flame/addon.py @@ -1,16 +1,13 @@ import os -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon HOST_DIR = os.path.dirname(os.path.abspath(__file__)) -class FlameAddon(OpenPypeModule, IHostAddon): +class FlameAddon(AYONAddon, IHostAddon): name = "flame" host_name = "flame" 
- def initialize(self, module_settings): - self.enabled = True - def add_implementation_envs(self, env, _app): # Add requirements to DL_PYTHON_HOOK_PATH env["DL_PYTHON_HOOK_PATH"] = os.path.join(HOST_DIR, "startup") diff --git a/client/ayon_core/hosts/flame/api/pipeline.py b/client/ayon_core/hosts/flame/api/pipeline.py index 88375f829f..532f89b5e9 100644 --- a/client/ayon_core/hosts/flame/api/pipeline.py +++ b/client/ayon_core/hosts/flame/api/pipeline.py @@ -147,8 +147,8 @@ def imprint(segment, data=None): Examples: data = { 'asset': 'sq020sh0280', - 'family': 'render', - 'subset': 'subsetMain' + 'productType': 'render', + 'productName': 'productMain' } """ data = data or {} diff --git a/client/ayon_core/hosts/flame/api/plugin.py b/client/ayon_core/hosts/flame/api/plugin.py index 720e6792b0..cf28a3cef3 100644 --- a/client/ayon_core/hosts/flame/api/plugin.py +++ b/client/ayon_core/hosts/flame/api/plugin.py @@ -353,9 +353,9 @@ class PublishableClip: rename_default = False hierarchy_default = "{_folder_}/{_sequence_}/{_track_}" clip_name_default = "shot_{_trackIndex_:0>3}_{_clipIndex_:0>4}" - subset_name_default = "[ track name ]" review_track_default = "[ none ]" - subset_family_default = "plate" + base_product_name_default = "[ track name ]" + base_product_type_default = "plate" count_from_default = 10 count_steps_default = 10 vertical_sync_default = False @@ -368,7 +368,7 @@ class PublishableClip: def __init__(self, segment, **kwargs): self.rename_index = kwargs["rename_index"] - self.family = kwargs["family"] + self.product_type = kwargs["family"] self.log = kwargs["log"] # get main parent objects @@ -486,10 +486,10 @@ class PublishableClip: "countFrom", {}).get("value") or self.count_from_default self.count_steps = self.ui_inputs.get( "countSteps", {}).get("value") or self.count_steps_default - self.subset_name = self.ui_inputs.get( - "subsetName", {}).get("value") or self.subset_name_default - self.subset_family = self.ui_inputs.get( - "subsetFamily", {}).get("value") or self.subset_family_default + self.base_product_name = self.ui_inputs.get( + "productName", {}).get("value") or self.base_product_name_default + self.base_product_type = self.ui_inputs.get( + "productType", {}).get("value") or self.base_product_type_default self.vertical_sync = self.ui_inputs.get( "vSyncOn", {}).get("value") or self.vertical_sync_default self.driving_layer = self.ui_inputs.get( @@ -509,12 +509,14 @@ class PublishableClip: or self.retimed_framerange_default ) - # build subset name from layer name - if self.subset_name == "[ track name ]": - self.subset_name = self.track_name + # build product name from layer name + if self.base_product_name == "[ track name ]": + self.base_product_name = self.track_name - # create subset for publishing - self.subset = self.subset_family + self.subset_name.capitalize() + # create product for publishing + self.product_name = ( + self.base_product_type + self.base_product_name.capitalize() + ) def _replace_hash_to_expression(self, name, text): """ Replace hash with number in correct padding. 
""" @@ -608,14 +610,14 @@ class PublishableClip: _hero_data = deepcopy(hero_data) _hero_data.update({"heroTrack": False}) if _in <= self.clip_in and _out >= self.clip_out: - data_subset = hero_data["subset"] + data_product_name = hero_data["productName"] # add track index in case duplicity of names in hero data - if self.subset in data_subset: - _hero_data["subset"] = self.subset + str( + if self.product_name in data_product_name: + _hero_data["productName"] = self.product_name + str( self.track_index) - # in case track name and subset name is the same then add - if self.subset_name == self.track_name: - _hero_data["subset"] = self.subset + # in case track name and product name is the same then add + if self.base_product_name == self.track_name: + _hero_data["productName"] = self.product_name # assign data to return hierarchy data to tag tag_hierarchy_data = _hero_data break @@ -637,9 +639,9 @@ class PublishableClip: "hierarchy": hierarchy_filled, "parents": self.parents, "hierarchyData": hierarchy_formatting_data, - "subset": self.subset, - "family": self.subset_family, - "families": [self.family] + "productName": self.product_name, + "productType": self.base_product_type, + "families": [self.base_product_type, self.product_type] } def _convert_to_entity(self, type, template): @@ -704,7 +706,7 @@ class ClipLoader(LoaderPlugin): _mapping = None _host_settings = None - def apply_settings(cls, project_settings, system_settings): + def apply_settings(cls, project_settings): plugin_type_settings = ( project_settings diff --git a/client/ayon_core/hosts/flame/api/workio.py b/client/ayon_core/hosts/flame/api/workio.py index 0e3cb7f5fd..eef10a4847 100644 --- a/client/ayon_core/hosts/flame/api/workio.py +++ b/client/ayon_core/hosts/flame/api/workio.py @@ -34,4 +34,4 @@ def current_file(): def work_root(session): - return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/") + return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/") diff --git a/client/ayon_core/hosts/flame/plugins/create/create_shot_clip.py b/client/ayon_core/hosts/flame/plugins/create/create_shot_clip.py index ee99040ca3..c73ee7510c 100644 --- a/client/ayon_core/hosts/flame/plugins/create/create_shot_clip.py +++ b/client/ayon_core/hosts/flame/plugins/create/create_shot_clip.py @@ -6,7 +6,7 @@ class CreateShotClip(opfapi.Creator): """Publishable clip""" label = "Create Publishable Clip" - family = "clip" + product_type = "clip" icon = "film" defaults = ["Main"] @@ -32,7 +32,7 @@ class CreateShotClip(opfapi.Creator): # open widget for plugins inputs results_back = self.create_widget( - "Pype publish attributes creator", + "AYON publish attributes creator", "Define sequential rename and fill hierarchy data.", gui_inputs ) @@ -62,7 +62,7 @@ class CreateShotClip(opfapi.Creator): "log": self.log, "ui_inputs": results_back, "avalon": self.data, - "family": self.data["family"] + "product_type": self.data["productType"] } for i, segment in enumerate(sorted_selected_segments): @@ -203,19 +203,19 @@ class CreateShotClip(opfapi.Creator): "target": "ui", "order": 3, "value": { - "subsetName": { + "productName": { "value": ["[ track name ]", "main", "bg", "fg", "bg", "animatic"], "type": "QComboBox", "label": "Subset Name", "target": "ui", - "toolTip": "chose subset name pattern, if [ track name ] is selected, name of track layer will be used", # noqa + "toolTip": "chose product name pattern, if [ track name ] is selected, name of track layer will be used", # noqa "order": 0}, - "subsetFamily": { + "productType": { "value": 
["plate", "take"], "type": "QComboBox", "label": "Subset Family", - "target": "ui", "toolTip": "What use of this subset is for", # noqa + "target": "ui", "toolTip": "What use of this product is for", # noqa "order": 1}, "reviewTrack": { "value": ["< none >"] + gui_tracks, @@ -229,7 +229,7 @@ class CreateShotClip(opfapi.Creator): "type": "QCheckBox", "label": "Include audio", "target": "tag", - "toolTip": "Process subsets with corresponding audio", # noqa + "toolTip": "Process products with corresponding audio", # noqa "order": 3}, "sourceResolution": { "value": False, diff --git a/client/ayon_core/hosts/flame/plugins/load/load_clip.py b/client/ayon_core/hosts/flame/plugins/load/load_clip.py index 6f35196932..72a6f2a585 100644 --- a/client/ayon_core/hosts/flame/plugins/load/load_clip.py +++ b/client/ayon_core/hosts/flame/plugins/load/load_clip.py @@ -11,7 +11,7 @@ from ayon_core.lib.transcoding import ( class LoadClip(opfapi.ClipLoader): - """Load a subset to timeline as clip + """Load a product to timeline as clip Place clip to timeline on its asset origin timings collected during conforming to project @@ -31,14 +31,14 @@ class LoadClip(opfapi.ClipLoader): # settings reel_group_name = "OpenPype_Reels" reel_name = "Loaded" - clip_name_template = "{asset}_{subset}<_{output}>" + clip_name_template = "{folder[name]}_{product[name]}<_{output}>" """ Anatomy keys from version context data and dynamically added: - {layerName} - original layer name token - {layerUID} - original layer UID token - {originalBasename} - original clip name taken from file """ - layer_rename_template = "{asset}_{subset}<_{output}>" + layer_rename_template = "{folder[name]}_{product[name]}<_{output}>" layer_rename_patterns = [] def load(self, context, name, namespace, options): @@ -70,7 +70,7 @@ class LoadClip(opfapi.ClipLoader): self.log.info("Loading with colorspace: `{}`".format(colorspace)) # create workfile path - workfile_dir = os.environ["AVALON_WORKDIR"] + workfile_dir = os.environ["AYON_WORKDIR"] openclip_dir = os.path.join( workfile_dir, clip_name ) @@ -180,27 +180,27 @@ class LoadClip(opfapi.ClipLoader): # unwrapping segment from input clip pass - # def switch(self, container, representation): - # self.update(container, representation) + # def switch(self, container, context): + # self.update(container, context) - # def update(self, container, representation): + # def update(self, container, context): # """ Updating previously loaded clips # """ - # # load clip to timeline and get main variables + # repre_doc = context['representation'] # name = container['name'] # namespace = container['namespace'] # track_item = phiero.get_track_items( # track_item_name=namespace) # version = io.find_one({ # "type": "version", - # "_id": representation["parent"] + # "_id": repre_doc["parent"] # }) # version_data = version.get("data", {}) # version_name = version.get("name", None) # colorspace = version_data.get("colorspace", None) # object_name = "{}_{}".format(name, namespace) - # file = get_representation_path(representation).replace("\\", "/") + # file = get_representation_path(repre_doc).replace("\\", "/") # clip = track_item.source() # # reconnect media to new path @@ -225,7 +225,7 @@ class LoadClip(opfapi.ClipLoader): # # add variables related to version context # data_imprint.update({ - # "representation": str(representation["_id"]), + # "representation": str(repre_doc["_id"]), # "version": version_name, # "colorspace": colorspace, # "objectName": object_name diff --git 
a/client/ayon_core/hosts/flame/plugins/load/load_clip_batch.py b/client/ayon_core/hosts/flame/plugins/load/load_clip_batch.py index a66bf53622..9f81103cb4 100644 --- a/client/ayon_core/hosts/flame/plugins/load/load_clip_batch.py +++ b/client/ayon_core/hosts/flame/plugins/load/load_clip_batch.py @@ -10,7 +10,7 @@ from ayon_core.lib.transcoding import ( ) class LoadClipBatch(opfapi.ClipLoader): - """Load a subset to timeline as clip + """Load a product to timeline as clip Place clip to timeline on its asset origin timings collected during conforming to project @@ -29,14 +29,14 @@ class LoadClipBatch(opfapi.ClipLoader): # settings reel_name = "OP_LoadedReel" - clip_name_template = "{batch}_{asset}_{subset}<_{output}>" + clip_name_template = "{batch}_{folder[name]}_{product[name]}<_{output}>" """ Anatomy keys from version context data and dynamically added: - {layerName} - original layer name token - {layerUID} - original layer UID token - {originalBasename} - original clip name taken from file """ - layer_rename_template = "{asset}_{subset}<_{output}>" + layer_rename_template = "{folder[name]}_{product[name]}<_{output}>" layer_rename_patterns = [] def load(self, context, name, namespace, options): @@ -50,17 +50,33 @@ class LoadClipBatch(opfapi.ClipLoader): version_name = version.get("name", None) colorspace = self.get_colorspace(context) + clip_name_template = self.clip_name_template + layer_rename_template = self.layer_rename_template # in case output is not in context replace key to representation if not context["representation"]["context"].get("output"): - self.clip_name_template = self.clip_name_template.replace( + clip_name_template = clip_name_template.replace( "output", "representation") - self.layer_rename_template = self.layer_rename_template.replace( + layer_rename_template = layer_rename_template.replace( "output", "representation") + asset_doc = context["asset"] + subset_doc = context["subset"] formatting_data = deepcopy(context["representation"]["context"]) formatting_data["batch"] = self.batch.name.get_value() + formatting_data.update({ + "asset": asset_doc["name"], + "folder": { + "name": asset_doc["name"], + }, + "subset": subset_doc["name"], + "family": subset_doc["data"]["family"], + "product": { + "name": subset_doc["name"], + "type": subset_doc["data"]["family"], + } + }) - clip_name = StringTemplate(self.clip_name_template).format( + clip_name = StringTemplate(clip_name_template).format( formatting_data) # convert colorspace with ocio to flame mapping @@ -69,7 +85,7 @@ class LoadClipBatch(opfapi.ClipLoader): self.log.info("Loading with colorspace: `{}`".format(colorspace)) # create workfile path - workfile_dir = options.get("workdir") or os.environ["AVALON_WORKDIR"] + workfile_dir = options.get("workdir") or os.environ["AYON_WORKDIR"] openclip_dir = os.path.join( workfile_dir, clip_name ) @@ -86,7 +102,7 @@ class LoadClipBatch(opfapi.ClipLoader): "path": path.replace("\\", "/"), "colorspace": colorspace, "version": "v{:0>3}".format(version_name), - "layer_rename_template": self.layer_rename_template, + "layer_rename_template": layer_rename_template, "layer_rename_patterns": self.layer_rename_patterns, "context_data": formatting_data } diff --git a/client/ayon_core/hosts/flame/plugins/publish/collect_test_selection.py b/client/ayon_core/hosts/flame/plugins/publish/collect_test_selection.py index 0fb41eab78..7442e7df48 100644 --- a/client/ayon_core/hosts/flame/plugins/publish/collect_test_selection.py +++ 
b/client/ayon_core/hosts/flame/plugins/publish/collect_test_selection.py @@ -59,6 +59,6 @@ class CollectTestSelection(pyblish.api.ContextPlugin): opfapi.imprint(segment, { 'asset': segment.name.get_value(), - 'family': 'render', - 'subset': 'subsetMain' + 'productType': 'render', + 'productName': 'productMain' }) diff --git a/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_instances.py b/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_instances.py index 636cbd8031..9d6560023c 100644 --- a/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -3,6 +3,7 @@ from types import NoneType import pyblish import ayon_core.hosts.flame.api as opfapi from ayon_core.hosts.flame.otio import flame_export +from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID from ayon_core.pipeline.editorial import ( is_overlapping_otio_ranges, get_media_range_with_retimes @@ -47,7 +48,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): if not marker_data: continue - if marker_data.get("id") != "pyblish.avalon.instance": + if marker_data.get("id") not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: continue self.log.debug("__ segment.name: {}".format( @@ -107,24 +110,25 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # add ocio_data to instance data inst_data.update(otio_data) - asset = marker_data["asset"] - subset = marker_data["subset"] + folder_path = marker_data["folderPath"] + folder_name = folder_path.rsplit("/")[-1] + product_name = marker_data["productName"] - # insert family into families - family = marker_data["family"] + # insert product type into families + product_type = marker_data["productType"] families = [str(f) for f in marker_data["families"]] - families.insert(0, str(family)) + families.insert(0, str(product_type)) # form label - label = asset - if asset != clip_name: + label = folder_name + if folder_name != clip_name: label += " ({})".format(clip_name) - label += " {} [{}]".format(subset, ", ".join(families)) + label += " {} [{}]".format(product_name, ", ".join(families)) inst_data.update({ - "name": "{}_{}".format(asset, subset), + "name": "{}_{}".format(folder_name, product_name), "label": label, - "asset": asset, + "folderPath": folder_path, "item": segment, "families": families, "publish": marker_data["publish"], @@ -332,26 +336,28 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): if not hierarchy_data: return - asset = data["asset"] - subset = "shotMain" + folder_path = data["folderPath"] + folder_name = folder_path.rsplit("/")[-1] + product_name = "shotMain" - # insert family into families - family = "shot" + # insert product type into families + product_type = "shot" # form label - label = asset - if asset != clip_name: + label = folder_name + if folder_name != clip_name: label += " ({}) ".format(clip_name) - label += " {}".format(subset) - label += " [{}]".format(family) + label += " {}".format(product_name) + label += " [{}]".format(product_type) data.update({ - "name": "{}_{}".format(asset, subset), + "name": "{}_{}".format(folder_name, product_name), "label": label, - "subset": subset, - "asset": asset, - "family": family, - "families": [] + "productName": product_name, + "folderPath": folder_path, + "productType": product_type, + "family": product_type, + "families": [product_type] }) instance = context.create_instance(**data) diff --git 
a/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_otio.py b/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_otio.py index 6a3e99aa55..6d04d53cea 100644 --- a/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_otio.py +++ b/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_otio.py @@ -3,7 +3,7 @@ import pyblish.api from ayon_core.client import get_asset_name_identifier import ayon_core.hosts.flame.api as opfapi from ayon_core.hosts.flame.otio import flame_export -from ayon_core.pipeline.create import get_subset_name +from ayon_core.pipeline.create import get_product_name class CollecTimelineOTIO(pyblish.api.ContextPlugin): @@ -14,7 +14,7 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin): def process(self, context): # plugin defined - family = "workfile" + product_type = "workfile" variant = "otioTimeline" # main @@ -23,29 +23,30 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin): project = opfapi.get_current_project() sequence = opfapi.get_current_sequence(opfapi.CTX.selection) - # create subset name - subset_name = get_subset_name( - family, - variant, - task_name, - asset_doc, + # create product name + product_name = get_product_name( context.data["projectName"], + asset_doc, + task_name, context.data["hostName"], + product_type, + variant, project_settings=context.data["project_settings"] ) - asset_name = get_asset_name_identifier(asset_doc) + folder_path = get_asset_name_identifier(asset_doc) # adding otio timeline to context with opfapi.maintained_segment_selection(sequence) as selected_seg: otio_timeline = flame_export.create_otio_timeline(sequence) instance_data = { - "name": subset_name, - "asset": asset_name, - "subset": subset_name, - "family": "workfile", - "families": [] + "name": product_name, + "folderPath": folder_path, + "productName": product_name, + "productType": product_type, + "family": product_type, + "families": [product_type] } # create instance with workfile diff --git a/client/ayon_core/hosts/flame/plugins/publish/extract_subset_resources.py b/client/ayon_core/hosts/flame/plugins/publish/extract_subset_resources.py index af699fd03a..cae08cd76b 100644 --- a/client/ayon_core/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/client/ayon_core/hosts/flame/plugins/publish/extract_subset_resources.py @@ -1,6 +1,5 @@ import os import re -import tempfile from copy import deepcopy import pyblish.api @@ -15,12 +14,12 @@ from ayon_core.pipeline.editorial import ( import flame -class ExtractSubsetResources(publish.Extractor): +class ExtractProductResources(publish.Extractor): """ Extractor for transcoding files from Flame clip """ - label = "Extract subset resources" + label = "Extract product resources" order = pyblish.api.ExtractorOrder families = ["clip"] hosts = ["flame"] @@ -47,7 +46,7 @@ class ExtractSubsetResources(publish.Extractor): hide_ui_on_process = True # settings - export_presets_mapping = {} + export_presets_mapping = [] def process(self, instance): if not self.keep_original_representation: @@ -56,7 +55,7 @@ class ExtractSubsetResources(publish.Extractor): # flame objects segment = instance.data["item"] - asset_name = instance.data["asset"] + folder_path = instance.data["folderPath"] segment_name = segment.name.get_value() clip_path = instance.data["path"] sequence_clip = instance.context.data["flameSequence"] @@ -146,15 +145,21 @@ class ExtractSubsetResources(publish.Extractor): # append staging dir for later cleanup instance.context.data["cleanupFullPaths"].append(staging_dir) + 
export_presets_mapping = {} + for preset_mapping in deepcopy(self.export_presets_mapping): + name = preset_mapping.pop("name") + export_presets_mapping[name] = preset_mapping + # add default preset type for thumbnail and reviewable video # update them with settings and override in case the same # are found in there - _preset_keys = [k.split('_')[0] for k in self.export_presets_mapping] + _preset_keys = [k.split('_')[0] for k in export_presets_mapping] export_presets = { - k: v for k, v in deepcopy(self.default_presets).items() + k: v + for k, v in deepcopy(self.default_presets).items() if k not in _preset_keys } - export_presets.update(self.export_presets_mapping) + export_presets.update(export_presets_mapping) if not instance.data.get("versionData"): instance.data["versionData"] = {} @@ -244,7 +249,7 @@ class ExtractSubsetResources(publish.Extractor): out_mark = in_mark + source_duration_handles exporting_clip = self.import_clip(clip_path) exporting_clip.name.set_value("{}_{}".format( - asset_name, segment_name)) + folder_path, segment_name)) # add xml tags modifications modify_xml_data.update({ diff --git a/client/ayon_core/hosts/flame/plugins/publish/integrate_batch_group.py b/client/ayon_core/hosts/flame/plugins/publish/integrate_batch_group.py index 3458bd3002..a66ee9f2c0 100644 --- a/client/ayon_core/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/client/ayon_core/hosts/flame/plugins/publish/integrate_batch_group.py @@ -44,8 +44,8 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): )) # load plate to batch group - self.log.info("Loading subset `{}` into batch `{}`".format( - instance.data["subset"], bgroup.name.get_value() + self.log.info("Loading product `{}` into batch `{}`".format( + instance.data["productName"], bgroup.name.get_value() )) self._load_clip_to_context(instance, bgroup) @@ -168,10 +168,10 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): handle_start = instance.data["handleStart"] handle_end = instance.data["handleEnd"] frame_duration = (frame_end - frame_start) + 1 - asset_name = instance.data["asset"] + folder_path = instance.data["folderPath"] task_name = task_data["name"] - batchgroup_name = "{}_{}".format(asset_name, task_name) + batchgroup_name = "{}_{}".format(folder_path, task_name) batch_data = { "shematic_reels": [ diff --git a/client/ayon_core/hosts/fusion/addon.py b/client/ayon_core/hosts/fusion/addon.py index 7eff2d93c8..54e48ea7bf 100644 --- a/client/ayon_core/hosts/fusion/addon.py +++ b/client/ayon_core/hosts/fusion/addon.py @@ -1,6 +1,6 @@ import os import re -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon from ayon_core.lib import Logger FUSION_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -22,7 +22,7 @@ def get_fusion_version(app_name): The function is triggered by the prelaunch hooks to get the fusion version. `app_name` is obtained by prelaunch hooks from the - `launch_context.env.get("AVALON_APP_NAME")`. + `launch_context.env.get("AYON_APP_NAME")`. 
To get a correct Fusion version, a version number should be present in the `applications/fusion/variants` key @@ -48,13 +48,10 @@ def get_fusion_version(app_name): ) -class FusionAddon(OpenPypeModule, IHostAddon): +class FusionAddon(AYONAddon, IHostAddon): name = "fusion" host_name = "fusion" - def initialize(self, module_settings): - self.enabled = True - def get_launch_hook_paths(self, app): if app.host_name != self.host_name: return [] diff --git a/client/ayon_core/hosts/fusion/api/pipeline.py b/client/ayon_core/hosts/fusion/api/pipeline.py index 7c480704a5..0e9e0724c7 100644 --- a/client/ayon_core/hosts/fusion/api/pipeline.py +++ b/client/ayon_core/hosts/fusion/api/pipeline.py @@ -135,7 +135,7 @@ class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): return current_filepath def work_root(self, session): - work_dir = session["AVALON_WORKDIR"] + work_dir = session["AYON_WORKDIR"] scene_dir = session.get("AVALON_SCENEDIR") if scene_dir: return os.path.join(work_dir, scene_dir) diff --git a/client/ayon_core/hosts/fusion/api/plugin.py b/client/ayon_core/hosts/fusion/api/plugin.py index 12a29d2986..95db8126e7 100644 --- a/client/ayon_core/hosts/fusion/api/plugin.py +++ b/client/ayon_core/hosts/fusion/api/plugin.py @@ -11,9 +11,10 @@ from ayon_core.lib import ( EnumDef, ) from ayon_core.pipeline import ( - legacy_io, Creator, - CreatedInstance + CreatedInstance, + AVALON_INSTANCE_ID, + AYON_INSTANCE_ID, ) @@ -32,14 +33,16 @@ class GenericCreateSaver(Creator): # TODO: This should be renamed together with Nuke so it is aligned temp_rendering_path_template = ( - "{workdir}/renders/fusion/{subset}/{subset}.{frame}.{ext}") + "{workdir}/renders/fusion/{product[name]}/" + "{product[name]}.{frame}.{ext}" + ) - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): self.pass_pre_attributes_to_instance(instance_data, pre_create_data) instance = CreatedInstance( - family=self.family, - subset_name=subset_name, + product_type=self.product_type, + product_name=product_name, data=instance_data, creator=self, ) @@ -110,23 +113,23 @@ class GenericCreateSaver(Creator): tool.SetData(f"openpype.{key}", value) def _update_tool_with_data(self, tool, data): - """Update tool node name and output path based on subset data""" - if "subset" not in data: + """Update tool node name and output path based on product data""" + if "productName" not in data: return - original_subset = tool.GetData("openpype.subset") + original_product_name = tool.GetData("openpype.productName") original_format = tool.GetData( "openpype.creator_attributes.image_format" ) - subset = data["subset"] + product_name = data["productName"] if ( - original_subset != subset + original_product_name != product_name or original_format != data["creator_attributes"]["image_format"] ): - self._configure_saver_tool(data, tool, subset) + self._configure_saver_tool(data, tool, product_name) - def _configure_saver_tool(self, data, tool, subset): + def _configure_saver_tool(self, data, tool, product_name): formatting_data = deepcopy(data) # get frame padding from anatomy templates @@ -136,27 +139,50 @@ class GenericCreateSaver(Creator): ext = data["creator_attributes"]["image_format"] # Subset change detected - workdir = os.path.normpath(legacy_io.Session["AVALON_WORKDIR"]) + product_type = formatting_data["productType"] + f_product_name = formatting_data["productName"] + + folder_path = formatting_data["folderPath"] + folder_name = folder_path.rsplit("/", 1)[-1] + + workdir = 
os.path.normpath(os.getenv("AYON_WORKDIR")) formatting_data.update({ "workdir": workdir, "frame": "0" * frame_padding, "ext": ext, "product": { - "name": formatting_data["subset"], - "type": formatting_data["family"], + "name": f_product_name, + "type": product_type, }, + # TODO add more variants for 'folder' and 'task' + "folder": { + "name": folder_name, + }, + "task": { + "name": data["task"], + }, + # Backwards compatibility + "asset": folder_name, + "subset": f_product_name, + "family": product_type, }) # build file path to render - filepath = self.temp_rendering_path_template.format(**formatting_data) + # TODO make sure the keys are available in 'formatting_data' + temp_rendering_path_template = ( + self.temp_rendering_path_template + .replace("{task}", "{task[name]}") + ) + + filepath = temp_rendering_path_template.format(**formatting_data) comp = get_current_comp() tool["Clip"] = comp.ReverseMapPath(os.path.normpath(filepath)) # Rename tool - if tool.Name != subset: - print(f"Renaming {tool.Name} -> {subset}") - tool.SetAttrs({"TOOLS_Name": subset}) + if tool.Name != product_name: + print(f"Renaming {tool.Name} -> {product_name}") + tool.SetAttrs({"TOOLS_Name": product_name}) def get_managed_tool_data(self, tool): """Return data of the tool if it matches creator identifier""" @@ -164,13 +190,13 @@ class GenericCreateSaver(Creator): if not isinstance(data, dict): return - required = { - "id": "pyblish.avalon.instance", - "creator_identifier": self.identifier, - } - for key, value in required.items(): - if key not in data or data[key] != value: - return + if ( + data.get("creator_identifier") != self.identifier + or data.get("id") not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + } + ): + return # Get active state from the actual tool state attrs = tool.GetAttrs() diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py index f63aaa1eb4..5aa2783129 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py @@ -131,7 +131,7 @@ class FusionCopyPrefsPrelaunch(PreLaunchHook): ) = self.get_copy_fusion_prefs_settings() # Get launched application context and return correct app version - app_name = self.launch_context.env.get("AVALON_APP_NAME") + app_name = self.launch_context.env.get("AYON_APP_NAME") app_version = get_fusion_version(app_name) if app_version is None: version_names = ", ".join(str(x) for x in FUSION_VERSIONS_DICT) diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py index 7cfa9d0a26..7eaf2ddc02 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py @@ -28,7 +28,7 @@ class FusionPrelaunch(PreLaunchHook): def execute(self): # making sure python 3 is installed at provided path # Py 3.3-3.10 for Fusion 18+ or Py 3.6 for Fu 16-17 - app_data = self.launch_context.env.get("AVALON_APP_NAME") + app_data = self.launch_context.env.get("AYON_APP_NAME") app_version = get_fusion_version(app_data) if not app_version: raise ApplicationLaunchFailed( diff --git a/client/ayon_core/hosts/fusion/plugins/create/create_image_saver.py b/client/ayon_core/hosts/fusion/plugins/create/create_image_saver.py index 856d86cff6..8110898ae9 100644 --- a/client/ayon_core/hosts/fusion/plugins/create/create_image_saver.py +++ b/client/ayon_core/hosts/fusion/plugins/create/create_image_saver.py @@ 
-17,7 +17,7 @@ class CreateImageSaver(GenericCreateSaver): identifier = "io.openpype.creators.fusion.imagesaver" label = "Image (saver)" name = "image" - family = "image" + product_type = "image" description = "Fusion Saver to generate image" default_frame = 0 diff --git a/client/ayon_core/hosts/fusion/plugins/create/create_saver.py b/client/ayon_core/hosts/fusion/plugins/create/create_saver.py index 1a0dad7060..b5abb2d949 100644 --- a/client/ayon_core/hosts/fusion/plugins/create/create_saver.py +++ b/client/ayon_core/hosts/fusion/plugins/create/create_saver.py @@ -12,7 +12,7 @@ class CreateSaver(GenericCreateSaver): identifier = "io.openpype.creators.fusion.saver" label = "Render (saver)" name = "render" - family = "render" + product_type = "render" description = "Fusion Saver to generate image sequence" default_frame_range_option = "asset_db" diff --git a/client/ayon_core/hosts/fusion/plugins/create/create_workfile.py b/client/ayon_core/hosts/fusion/plugins/create/create_workfile.py index 08d39b0145..dfd9da3df1 100644 --- a/client/ayon_core/hosts/fusion/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/fusion/plugins/create/create_workfile.py @@ -10,7 +10,7 @@ from ayon_core.pipeline import ( class FusionWorkfileCreator(AutoCreator): identifier = "workfile" - family = "workfile" + product_type = "workfile" label = "Workfile" icon = "fa5.file" @@ -27,9 +27,12 @@ class FusionWorkfileCreator(AutoCreator): if not data: return + product_name = data.get("productName") + if product_name is None: + product_name = data["subset"] instance = CreatedInstance( - family=self.family, - subset_name=data["subset"], + product_type=self.product_type, + product_name=product_name, data=data, creator=self ) @@ -59,7 +62,7 @@ class FusionWorkfileCreator(AutoCreator): existing_instance = None for instance in self.create_context.instances: - if instance.family == self.family: + if instance.product_type == self.product_type: existing_instance = instance break @@ -75,9 +78,12 @@ class FusionWorkfileCreator(AutoCreator): if existing_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - self.default_variant, task_name, asset_doc, - project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + self.default_variant, + host_name, ) data = { "folderPath": asset_name, @@ -85,12 +91,17 @@ class FusionWorkfileCreator(AutoCreator): "variant": self.default_variant, } data.update(self.get_dynamic_data( - self.default_variant, task_name, asset_doc, - project_name, host_name, None + project_name, + asset_doc, + task_name, + self.default_variant, + host_name, + None + )) new_instance = CreatedInstance( - self.family, subset_name, data, self + self.product_type, product_name, data, self ) new_instance.transient_data["comp"] = comp self._add_instance_to_context(new_instance) @@ -100,10 +111,13 @@ class FusionWorkfileCreator(AutoCreator): or existing_instance["task"] != task_name ): asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - self.default_variant, task_name, asset_doc, - project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + self.default_variant, + host_name, ) existing_instance["folderPath"] = asset_name existing_instance["task"] = task_name - existing_instance["subset"] = subset_name + existing_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_alembic.py 
b/client/ayon_core/hosts/fusion/plugins/load/load_alembic.py index 0bc7ffd180..17f043bb34 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_alembic.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_alembic.py @@ -44,23 +44,24 @@ class FusionLoadAlembicMesh(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """Update Alembic path""" tool = container["_tool"] assert tool.ID == self.tool_type, f"Must be {self.tool_type}" comp = tool.Comp() - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) with comp_lock_and_undo_chunk(comp, "Update tool"): tool["Filename"] = path # Update the imprinted representation - tool.SetData("avalon.representation", str(representation["_id"])) + tool.SetData("avalon.representation", str(repre_doc["_id"])) def remove(self, container): tool = container["_tool"] diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_fbx.py b/client/ayon_core/hosts/fusion/plugins/load/load_fbx.py index 3751d7cc39..75320431a8 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_fbx.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_fbx.py @@ -59,23 +59,24 @@ class FusionLoadFBXMesh(load.LoaderPlugin): loader=self.__class__.__name__, ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """Update path""" tool = container["_tool"] assert tool.ID == self.tool_type, f"Must be {self.tool_type}" comp = tool.Comp() - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) with comp_lock_and_undo_chunk(comp, "Update tool"): tool["ImportFile"] = path # Update the imprinted representation - tool.SetData("avalon.representation", str(representation["_id"])) + tool.SetData("avalon.representation", str(repre_doc["_id"])) def remove(self, container): tool = container["_tool"] diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_sequence.py b/client/ayon_core/hosts/fusion/plugins/load/load_sequence.py index 5c183f5159..678da54ad6 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_sequence.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_sequence.py @@ -136,7 +136,7 @@ class FusionLoadSequence(load.LoaderPlugin): "render", "plate", "image", - "onilne", + "online", ] representations = ["*"] extensions = set( @@ -175,10 +175,10 @@ class FusionLoadSequence(load.LoaderPlugin): loader=self.__class__.__name__, ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Fusion automatically tries to reset some variables when changing @@ -224,7 +224,8 @@ class FusionLoadSequence(load.LoaderPlugin): assert tool.ID == "Loader", "Must be Loader" comp = tool.Comp() - context = get_representation_context(representation) + repre_doc = context["representation"] + context = get_representation_context(repre_doc) path = 
self.filepath_from_context(context) # Get start frame from version data @@ -255,7 +256,7 @@ class FusionLoadSequence(load.LoaderPlugin): ) # Update the imprinted representation - tool.SetData("avalon.representation", str(representation["_id"])) + tool.SetData("avalon.representation", str(repre_doc["_id"])) def remove(self, container): tool = container["_tool"] diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_usd.py b/client/ayon_core/hosts/fusion/plugins/load/load_usd.py index 9c61894d66..e315c84713 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_usd.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_usd.py @@ -28,9 +28,8 @@ class FusionLoadUSD(load.LoaderPlugin): tool_type = "uLoader" @classmethod - def apply_settings(cls, project_settings, system_settings): - super(FusionLoadUSD, cls).apply_settings(project_settings, - system_settings) + def apply_settings(cls, project_settings): + super(FusionLoadUSD, cls).apply_settings(project_settings) if cls.enabled: # Enable only in Fusion 18.5+ fusion = get_fusion_module() @@ -61,22 +60,23 @@ class FusionLoadUSD(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): tool = container["_tool"] assert tool.ID == self.tool_type, f"Must be {self.tool_type}" comp = tool.Comp() - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) with comp_lock_and_undo_chunk(comp, "Update tool"): tool["Filename"] = path # Update the imprinted representation - tool.SetData("avalon.representation", str(representation["_id"])) + tool.SetData("avalon.representation", str(repre_doc["_id"])) def remove(self, container): tool = container["_tool"] diff --git a/client/ayon_core/hosts/fusion/plugins/publish/collect_instances.py b/client/ayon_core/hosts/fusion/plugins/publish/collect_instances.py index a0131248e8..2cbd4d82f4 100644 --- a/client/ayon_core/hosts/fusion/plugins/publish/collect_instances.py +++ b/client/ayon_core/hosts/fusion/plugins/publish/collect_instances.py @@ -26,7 +26,7 @@ class CollectInstanceData(pyblish.api.InstancePlugin): instance.data["frame_range_source"] = frame_range_source # get asset frame ranges to all instances - # render family instances `asset_db` render target + # render product type instances `asset_db` render target start = context.data["frameStart"] end = context.data["frameEnd"] handle_start = context.data["handleStart"] @@ -34,7 +34,7 @@ class CollectInstanceData(pyblish.api.InstancePlugin): start_with_handle = start - handle_start end_with_handle = end + handle_end - # conditions for render family instances + # conditions for render product type instances if frame_range_source == "render_range": # set comp render frame ranges start = context.data["renderFrameStart"] @@ -70,11 +70,11 @@ class CollectInstanceData(pyblish.api.InstancePlugin): end_with_handle = frame # Include start and end render frame in label - subset = instance.data["subset"] + product_name = instance.data["productName"] label = ( - "{subset} ({start}-{end}) [{handle_start}-{handle_end}]" + "{product_name} ({start}-{end}) [{handle_start}-{handle_end}]" ).format( - subset=subset, + product_name=product_name, start=int(start), end=int(end), handle_start=int(handle_start), diff --git 
a/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py b/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py index f8870da1c5..36102d02cb 100644 --- a/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py @@ -49,31 +49,32 @@ class CollectFusionRender( if not inst.data.get("active", True): continue - family = inst.data["family"] - if family not in ["render", "image"]: + product_type = inst.data["productType"] + if product_type not in ["render", "image"]: continue task_name = context.data["task"] tool = inst.data["transientData"]["tool"] instance_families = inst.data.get("families", []) - subset_name = inst.data["subset"] + product_name = inst.data["productName"] instance = FusionRenderInstance( - family=family, tool=tool, workfileComp=comp, + productType=product_type, + family=product_type, families=instance_families, version=version, time="", source=current_file, label=inst.data["label"], - subset=subset_name, - asset=inst.data["asset"], + productName=product_name, + folderPath=inst.data["folderPath"], task=task_name, attachTo=False, setMembers='', publish=True, - name=subset_name, + name=product_name, resolutionWidth=comp_frame_format_prefs.get("Width"), resolutionHeight=comp_frame_format_prefs.get("Height"), pixelAspect=aspect_x / aspect_y, diff --git a/client/ayon_core/hosts/fusion/plugins/publish/extract_render_local.py b/client/ayon_core/hosts/fusion/plugins/publish/extract_render_local.py index eea232ac29..23a8cdb8a0 100644 --- a/client/ayon_core/hosts/fusion/plugins/publish/extract_render_local.py +++ b/client/ayon_core/hosts/fusion/plugins/publish/extract_render_local.py @@ -72,7 +72,7 @@ class FusionRenderLocal( self.log.info( "Rendered '{nm}' for asset '{ast}' under the task '{tsk}'".format( nm=instance.data["name"], - ast=instance.data["asset"], + ast=instance.data["folderPath"], tsk=instance.data["task"], ) ) diff --git a/client/ayon_core/hosts/fusion/plugins/publish/validate_unique_subsets.py b/client/ayon_core/hosts/fusion/plugins/publish/validate_unique_subsets.py index 619b52077e..939ddbd117 100644 --- a/client/ayon_core/hosts/fusion/plugins/publish/validate_unique_subsets.py +++ b/client/ayon_core/hosts/fusion/plugins/publish/validate_unique_subsets.py @@ -7,7 +7,7 @@ from ayon_core.hosts.fusion.api.action import SelectInvalidAction class ValidateUniqueSubsets(pyblish.api.ContextPlugin): - """Ensure all instances have a unique subset name""" + """Ensure all instances have a unique product name""" order = pyblish.api.ValidatorOrder label = "Validate Unique Subsets" @@ -18,25 +18,31 @@ class ValidateUniqueSubsets(pyblish.api.ContextPlugin): @classmethod def get_invalid(cls, context): - # Collect instances per subset per asset - instances_per_subset_asset = defaultdict(lambda: defaultdict(list)) + # Collect instances per product per folder + instances_per_product_folder = defaultdict(lambda: defaultdict(list)) for instance in context: - asset = instance.data.get("asset", context.data.get("asset")) - subset = instance.data.get("subset", context.data.get("subset")) - instances_per_subset_asset[asset][subset].append(instance) + folder_path = instance.data["folderPath"] + product_name = instance.data["productName"] + instances_per_product_folder[folder_path][product_name].append( + instance + ) # Find which asset + subset combination has more than one instance # Those are considered invalid because they'd integrate to the same # destination. 
invalid = [] - for asset, instances_per_subset in instances_per_subset_asset.items(): - for subset, instances in instances_per_subset.items(): + for folder_path, instances_per_product in ( + instances_per_product_folder.items() + ): + for product_name, instances in instances_per_product.items(): if len(instances) > 1: cls.log.warning( - "{asset} > {subset} used by more than " - "one instance: {instances}".format( - asset=asset, - subset=subset, + ( + "{folder_path} > {product_name} used by more than " + "one instance: {instances}" + ).format( + folder_path=folder_path, + product_name=product_name, instances=instances ) ) @@ -50,6 +56,7 @@ class ValidateUniqueSubsets(pyblish.api.ContextPlugin): def process(self, context): invalid = self.get_invalid(context) if invalid: - raise PublishValidationError("Multiple instances are set to " - "the same asset > subset.", - title=self.label) + raise PublishValidationError( + "Multiple instances are set to the same folder > product.", + title=self.label + ) diff --git a/client/ayon_core/hosts/harmony/addon.py b/client/ayon_core/hosts/harmony/addon.py index 172a1f104f..476d569415 100644 --- a/client/ayon_core/hosts/harmony/addon.py +++ b/client/ayon_core/hosts/harmony/addon.py @@ -1,16 +1,13 @@ import os -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon HARMONY_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) -class HarmonyAddon(OpenPypeModule, IHostAddon): +class HarmonyAddon(AYONAddon, IHostAddon): name = "harmony" host_name = "harmony" - def initialize(self, module_settings): - self.enabled = True - def add_implementation_envs(self, env, _app): """Modify environments to contain all required for implementation.""" openharmony_path = os.path.join( diff --git a/client/ayon_core/hosts/harmony/api/README.md b/client/ayon_core/hosts/harmony/api/README.md index cdc17b2285..151b2bce9e 100644 --- a/client/ayon_core/hosts/harmony/api/README.md +++ b/client/ayon_core/hosts/harmony/api/README.md @@ -52,7 +52,7 @@ Because Harmony projects are directories, this integration uses `.zip` as work f ### Show Workfiles on launch -You can show the Workfiles app when Harmony launches by setting environment variable `AVALON_HARMONY_WORKFILES_ON_LAUNCH=1`. +You can show the Workfiles app when Harmony launches by setting environment variable `AYON_HARMONY_WORKFILES_ON_LAUNCH=1`. ## Developing @@ -204,7 +204,7 @@ class CreateComposite(harmony.Creator): name = "compositeDefault" label = "Composite" - family = "mindbender.template" + product_type = "mindbender.template" def __init__(self, *args, **kwargs): super(CreateComposite, self).__init__(*args, **kwargs) @@ -212,6 +212,7 @@ class CreateComposite(harmony.Creator): The creator plugin can be configured to use other node types. 
For example here is a write node creator: ```python +from uuid import uuid4 import ayon_core.hosts.harmony.api as harmony @@ -220,7 +221,7 @@ class CreateRender(harmony.Creator): name = "writeDefault" label = "Write" - family = "mindbender.imagesequence" + product_type = "mindbender.imagesequence" node_type = "WRITE" def __init__(self, *args, **kwargs): @@ -242,6 +243,7 @@ class CreateRender(harmony.Creator): #### Collector Plugin ```python import pyblish.api +from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID import ayon_core.hosts.harmony.api as harmony @@ -252,7 +254,7 @@ class CollectInstances(pyblish.api.ContextPlugin): a composite node and marked with a unique identifier; Identifier: - id (str): "pyblish.avalon.instance" + id (str): "ayon.create.instance" """ label = "Instances" @@ -272,7 +274,7 @@ class CollectInstances(pyblish.api.ContextPlugin): continue # Skip containers. - if "container" in data["id"]: + if data["id"] not in {AYON_INSTANCE_ID, AVALON_INSTANCE_ID}: continue instance = context.create_instance(node.split("/")[-1]) @@ -287,6 +289,7 @@ class CollectInstances(pyblish.api.ContextPlugin): #### Extractor Plugin ```python import os +from uuid import uuid4 import pyblish.api import ayon_core.hosts.harmony.api as harmony @@ -418,6 +421,7 @@ class ExtractImage(pyblish.api.InstancePlugin): #### Loader Plugin ```python import os +from uuid import uuid4 import ayon_core.hosts.harmony.api as harmony @@ -607,11 +611,12 @@ class ImageSequenceLoader(load.LoaderPlugin): self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): node = container.pop("node") + repre_doc = context["representation"] project_name = get_current_project_name() - version = get_version_by_id(project_name, representation["parent"]) + version = get_version_by_id(project_name, repre_doc["parent"]) files = [] for f in version["data"]["files"]: files.append( @@ -628,7 +633,7 @@ class ImageSequenceLoader(load.LoaderPlugin): ) harmony.imprint( - node, {"representation": str(representation["_id"])} + node, {"representation": str(repre_doc["_id"])} ) def remove(self, container): @@ -644,8 +649,8 @@ class ImageSequenceLoader(load.LoaderPlugin): {"function": func, "args": [node]} ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) ``` ## Resources diff --git a/client/ayon_core/hosts/harmony/api/TB_sceneOpened.js b/client/ayon_core/hosts/harmony/api/TB_sceneOpened.js index 1fb0d295e7..cdf60c1aa8 100644 --- a/client/ayon_core/hosts/harmony/api/TB_sceneOpened.js +++ b/client/ayon_core/hosts/harmony/api/TB_sceneOpened.js @@ -349,7 +349,7 @@ function start() { /** hostname or ip of server - should be localhost */ var host = '127.0.0.1'; /** port of the server */ - var port = parseInt(System.getenv('AVALON_HARMONY_PORT')); + var port = parseInt(System.getenv('AYON_HARMONY_PORT')); // Attach the client to the QApplication to preserve. 
var app = QCoreApplication.instance(); diff --git a/client/ayon_core/hosts/harmony/api/lib.py b/client/ayon_core/hosts/harmony/api/lib.py index 782134c343..bc73e19066 100644 --- a/client/ayon_core/hosts/harmony/api/lib.py +++ b/client/ayon_core/hosts/harmony/api/lib.py @@ -189,14 +189,14 @@ def launch(application_path, *args): install_host(harmony) ProcessContext.port = random.randrange(49152, 65535) - os.environ["AVALON_HARMONY_PORT"] = str(ProcessContext.port) + os.environ["AYON_HARMONY_PORT"] = str(ProcessContext.port) ProcessContext.application_path = application_path # Launch Harmony. setup_startup_scripts() check_libs() - if not os.environ.get("AVALON_HARMONY_WORKFILES_ON_LAUNCH", False): + if not os.environ.get("AYON_HARMONY_WORKFILES_ON_LAUNCH", False): open_empty_workfile() return diff --git a/client/ayon_core/hosts/harmony/api/workio.py b/client/ayon_core/hosts/harmony/api/workio.py index 8df5ede917..1f95148e75 100644 --- a/client/ayon_core/hosts/harmony/api/workio.py +++ b/client/ayon_core/hosts/harmony/api/workio.py @@ -74,4 +74,4 @@ def current_file(): def work_root(session): - return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/") + return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/") diff --git a/client/ayon_core/hosts/harmony/js/loaders/ImageSequenceLoader.js b/client/ayon_core/hosts/harmony/js/loaders/ImageSequenceLoader.js index 25afeff214..ebbd7163f9 100644 --- a/client/ayon_core/hosts/harmony/js/loaders/ImageSequenceLoader.js +++ b/client/ayon_core/hosts/harmony/js/loaders/ImageSequenceLoader.js @@ -88,7 +88,7 @@ ImageSequenceLoader.getUniqueColumnName = function(columnPrefix) { * var args = [ * files, // Files in file sequences. * asset, // Asset name. - * subset, // Subset name. + * productName, // Product name. * startFrame, // Sequence starting frame. * groupId // Unique group ID (uuid4). * ]; @@ -106,7 +106,7 @@ ImageSequenceLoader.prototype.importFiles = function(args) { var doc = $.scn; var files = args[0]; var asset = args[1]; - var subset = args[2]; + var productName = args[2]; var startFrame = args[3]; var groupId = args[4]; var vectorFormat = null; @@ -124,7 +124,7 @@ ImageSequenceLoader.prototype.importFiles = function(args) { var num = 0; var name = ''; do { - name = asset + '_' + (num++) + '_' + subset; + name = asset + '_' + (num++) + '_' + productName; } while (currentGroup.getNodeByName(name) != null); extension = filename.substr(pos+1).toLowerCase(); diff --git a/client/ayon_core/hosts/harmony/js/loaders/TemplateLoader.js b/client/ayon_core/hosts/harmony/js/loaders/TemplateLoader.js index 06ef1671ea..78167fcb39 100644 --- a/client/ayon_core/hosts/harmony/js/loaders/TemplateLoader.js +++ b/client/ayon_core/hosts/harmony/js/loaders/TemplateLoader.js @@ -31,7 +31,7 @@ var TemplateLoader = function() {}; * var args = [ * templatePath, // Path to tpl file. * assetName, // Asset name. - * subsetName, // Subset name. + * productName, // Product name. 
* groupId // unique ID (uuid4) * ]; */ @@ -39,7 +39,7 @@ TemplateLoader.prototype.loadContainer = function(args) { var doc = $.scn; var templatePath = args[0]; var assetName = args[1]; - var subset = args[2]; + var productName = args[2]; var groupId = args[3]; // Get the current group @@ -62,7 +62,7 @@ TemplateLoader.prototype.loadContainer = function(args) { var num = 0; var containerGroupName = ''; do { - containerGroupName = assetName + '_' + (num++) + '_' + subset; + containerGroupName = assetName + '_' + (num++) + '_' + productName; } while (currentGroup.getNodeByName(containerGroupName) != null); // import the template diff --git a/client/ayon_core/hosts/harmony/plugins/create/create_farm_render.py b/client/ayon_core/hosts/harmony/plugins/create/create_farm_render.py index 6b19764181..16c403de6a 100644 --- a/client/ayon_core/hosts/harmony/plugins/create/create_farm_render.py +++ b/client/ayon_core/hosts/harmony/plugins/create/create_farm_render.py @@ -9,7 +9,7 @@ class CreateFarmRender(plugin.Creator): name = "renderDefault" label = "Render on Farm" - family = "renderFarm" + product_type = "renderFarm" node_type = "WRITE" def __init__(self, *args, **kwargs): diff --git a/client/ayon_core/hosts/harmony/plugins/create/create_render.py b/client/ayon_core/hosts/harmony/plugins/create/create_render.py index 0a2cd33551..23e02bd8a5 100644 --- a/client/ayon_core/hosts/harmony/plugins/create/create_render.py +++ b/client/ayon_core/hosts/harmony/plugins/create/create_render.py @@ -9,7 +9,7 @@ class CreateRender(plugin.Creator): name = "renderDefault" label = "Render" - family = "render" + product_type = "render" node_type = "WRITE" def __init__(self, *args, **kwargs): diff --git a/client/ayon_core/hosts/harmony/plugins/create/create_template.py b/client/ayon_core/hosts/harmony/plugins/create/create_template.py index 4f3fd85f00..c16e429436 100644 --- a/client/ayon_core/hosts/harmony/plugins/create/create_template.py +++ b/client/ayon_core/hosts/harmony/plugins/create/create_template.py @@ -6,7 +6,7 @@ class CreateTemplate(plugin.Creator): name = "templateDefault" label = "Template" - family = "harmony.template" + product_type = "harmony.template" def __init__(self, *args, **kwargs): super(CreateTemplate, self).__init__(*args, **kwargs) diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_audio.py b/client/ayon_core/hosts/harmony/plugins/load/load_audio.py index 14389166d7..b73c82197a 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_audio.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_audio.py @@ -45,17 +45,17 @@ class ImportAudioLoader(load.LoaderPlugin): {"function": func, "args": [context["subset"]["name"], wav_file]} ) - subset_name = context["subset"]["name"] + product_name = context["subset"]["name"] return harmony.containerise( - subset_name, + product_name, namespace, - subset_name, + product_name, context, self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): pass def remove(self, container): diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_background.py b/client/ayon_core/hosts/harmony/plugins/load/load_background.py index 1c61cfa7a4..bf454a9ec7 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_background.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_background.py @@ -254,7 +254,7 @@ class BackgroundLoader(load.LoaderPlugin): bg_folder = os.path.dirname(path) - subset_name = context["subset"]["name"] + product_name = context["subset"]["name"] # read_node_name 
+= "_{}".format(uuid.uuid4()) container_nodes = [] @@ -272,16 +272,17 @@ class BackgroundLoader(load.LoaderPlugin): container_nodes.append(read_node) return harmony.containerise( - subset_name, + product_name, namespace, - subset_name, + product_name, context, self.__class__.__name__, nodes=container_nodes ) - def update(self, container, representation): - path = get_representation_path(representation) + def update(self, container, context): + repre_doc = context["representation"] + path = get_representation_path(repre_doc) with open(path) as json_file: data = json.load(json_file) @@ -301,7 +302,7 @@ class BackgroundLoader(load.LoaderPlugin): print(container) - is_latest = is_representation_from_latest(representation) + is_latest = is_representation_from_latest(repre_doc) for layer in sorted(layers): file_to_import = [ os.path.join(bg_folder, layer).replace("\\", "/") @@ -351,8 +352,11 @@ class BackgroundLoader(load.LoaderPlugin): harmony.send({"function": func, "args": [node, "red"]}) harmony.imprint( - container['name'], {"representation": str(representation["_id"]), - "nodes": container['nodes']} + container['name'], + { + "representation": str(repre_doc["_id"]), + "nodes": container["nodes"] + } ) def remove(self, container): @@ -369,5 +373,5 @@ class BackgroundLoader(load.LoaderPlugin): ) harmony.imprint(container['name'], {}, remove=True) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_imagesequence.py b/client/ayon_core/hosts/harmony/plugins/load/load_imagesequence.py index 4d87272de8..60b90fe42d 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_imagesequence.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_imagesequence.py @@ -47,7 +47,7 @@ class ImageSequenceLoader(load.LoaderPlugin): files.append(fname.parent.joinpath(remainder[0]).as_posix()) asset = context["asset"]["name"] - subset = context["subset"]["name"] + product_name = context["subset"]["name"] group_id = str(uuid.uuid4()) read_node = harmony.send( @@ -56,7 +56,7 @@ class ImageSequenceLoader(load.LoaderPlugin): "args": [ files, asset, - subset, + product_name, 1, group_id ] @@ -64,7 +64,7 @@ class ImageSequenceLoader(load.LoaderPlugin): )["result"] return harmony.containerise( - f"{asset}_{subset}", + f"{asset}_{product_name}", namespace, read_node, context, @@ -72,18 +72,19 @@ class ImageSequenceLoader(load.LoaderPlugin): nodes=[read_node] ) - def update(self, container, representation): + def update(self, container, context): """Update loaded containers. Args: container (dict): Container data. - representation (dict): Representation data. + context (dict): Representation context data. """ self_name = self.__class__.__name__ node = container.get("nodes").pop() - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) collections, remainder = clique.assemble( os.listdir(os.path.dirname(path)) ) @@ -110,7 +111,7 @@ class ImageSequenceLoader(load.LoaderPlugin): ) # Colour node. 
- if is_representation_from_latest(representation): + if is_representation_from_latest(repre_doc): harmony.send( { "function": "PypeHarmony.setColor", @@ -124,7 +125,7 @@ class ImageSequenceLoader(load.LoaderPlugin): }) harmony.imprint( - node, {"representation": str(representation["_id"])} + node, {"representation": str(repre_doc["_id"])} ) def remove(self, container): @@ -140,6 +141,6 @@ class ImageSequenceLoader(load.LoaderPlugin): ) harmony.imprint(node, {}, remove=True) - def switch(self, container, representation): + def switch(self, container, context): """Switch loaded representations.""" - self.update(container, representation) + self.update(container, context) diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_palette.py b/client/ayon_core/hosts/harmony/plugins/load/load_palette.py index aa5894e026..f9ce888f93 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_palette.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_palette.py @@ -26,15 +26,17 @@ class ImportPaletteLoader(load.LoaderPlugin): self.__class__.__name__ ) - def load_palette(self, representation): - subset_name = representation["context"]["subset"] - name = subset_name.replace("palette", "") + def load_palette(self, context): + subset_doc = context["subset"] + repre_doc = context["representation"] + product_name = subset_doc["name"] + name = product_name.replace("palette", "") # Overwrite palette on disk. scene_path = harmony.send( {"function": "scene.currentProjectPath"} )["result"] - src = get_representation_path(representation) + src = get_representation_path(repre_doc) dst = os.path.join( scene_path, "palette-library", @@ -44,7 +46,7 @@ class ImportPaletteLoader(load.LoaderPlugin): harmony.save_scene() - msg = "Updated {}.".format(subset_name) + msg = "Updated {}.".format(product_name) msg += " You need to reload the scene to see the changes.\n" msg += "Please save workfile when ready and use Workfiles " msg += "to reopen it." @@ -59,13 +61,14 @@ class ImportPaletteLoader(load.LoaderPlugin): def remove(self, container): harmony.remove(container["name"]) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): self.remove(container) - name = self.load_palette(representation) + name = self.load_palette(context) - container["representation"] = str(representation["_id"]) + repre_doc = context["representation"] + container["representation"] = str(repre_doc["_id"]) container["name"] = name harmony.imprint(name, container) diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_template.py b/client/ayon_core/hosts/harmony/plugins/load/load_template.py index d26f148c09..e981340c68 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_template.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_template.py @@ -70,19 +70,20 @@ class TemplateLoader(load.LoaderPlugin): self_name ) - def update(self, container, representation): + def update(self, container, context): """Update loaded containers. Args: container (dict): Container data. - representation (dict): Representation data. + context (dict): Representation context data. 
""" node_name = container["name"] node = harmony.find_node_by_name(node_name, "GROUP") self_name = self.__class__.__name__ - if is_representation_from_latest(representation): + repre_doc = context["representation"] + if is_representation_from_latest(repre_doc): self._set_green(node) else: self._set_red(node) @@ -110,7 +111,7 @@ class TemplateLoader(load.LoaderPlugin): None, container["data"]) harmony.imprint( - node, {"representation": str(representation["_id"])} + node, {"representation": str(repre_doc["_id"])} ) def remove(self, container): @@ -125,9 +126,9 @@ class TemplateLoader(load.LoaderPlugin): {"function": "PypeHarmony.deleteNode", "args": [node]} ) - def switch(self, container, representation): + def switch(self, container, context): """Switch representation containers.""" - self.update(container, representation) + self.update(container, context) def _set_green(self, node): """Set node color to green `rgba(0, 255, 0, 255)`.""" diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_template_workfile.py b/client/ayon_core/hosts/harmony/plugins/load/load_template_workfile.py index 0ea46f8f67..1b127c5bc4 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_template_workfile.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_template_workfile.py @@ -40,17 +40,17 @@ class ImportTemplateLoader(load.LoaderPlugin): shutil.rmtree(temp_dir) - subset_name = context["subset"]["name"] + product_name = context["subset"]["name"] return harmony.containerise( - subset_name, + product_name, namespace, - subset_name, + product_name, context, self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): pass def remove(self, container): diff --git a/client/ayon_core/hosts/harmony/plugins/publish/collect_farm_render.py b/client/ayon_core/hosts/harmony/plugins/publish/collect_farm_render.py index faeff7bddd..156e2ac6ba 100644 --- a/client/ayon_core/hosts/harmony/plugins/publish/collect_farm_render.py +++ b/client/ayon_core/hosts/harmony/plugins/publish/collect_farm_render.py @@ -80,7 +80,7 @@ class CollectFarmRender(publish.AbstractCollectRender): for frame in range(start, end + 1): expected_files.append( path / "{}-{}.{}".format( - render_instance.subset, + render_instance.productName, str(frame).rjust(int(info[2]) + 1, "0"), ext ) @@ -89,7 +89,7 @@ class CollectFarmRender(publish.AbstractCollectRender): return expected_files def get_instances(self, context): - """Get instances per Write node in `renderFarm` family.""" + """Get instances per Write node in `renderFarm` product type.""" version = None if self.sync_workfile_version: version = context.data["version"] @@ -98,7 +98,7 @@ class CollectFarmRender(publish.AbstractCollectRender): self_name = self.__class__.__name__ - asset_name = context.data["asset"] + folder_path = context.data["folderPath"] for node in context.data["allNodes"]: data = harmony.read(node) @@ -111,7 +111,10 @@ class CollectFarmRender(publish.AbstractCollectRender): if "container" in data["id"]: continue - if data["family"] != "renderFarm": + product_type = data.get("productType") + if product_type is None: + product_type = data.get("family") + if product_type != "renderFarm": continue # 0 - filename / 1 - type / 2 - zeros / 3 - start / 4 - enabled @@ -124,15 +127,14 @@ class CollectFarmRender(publish.AbstractCollectRender): # TODO: handle pixel aspect and frame step # TODO: set Deadline stuff (pools, priority, etc. 
by presets) - # because of using 'renderFarm' as a family, replace 'Farm' with - # capitalized task name - issue of avalon-core Creator app - subset_name = node.split("/")[1] - task_name = context.data["anatomyData"]["task"][ - "name"].capitalize() + # because of using 'renderFarm' as a product type, replace 'Farm' + # with capitalized task name - issue of Creator tool + product_name = node.split("/")[1] + task_name = context.data["task"].capitalize() replace_str = "" - if task_name.lower() not in subset_name.lower(): + if task_name.lower() not in product_name.lower(): replace_str = task_name - subset_name = subset_name.replace( + product_name = product_name.replace( 'Farm', replace_str) @@ -141,8 +143,8 @@ class CollectFarmRender(publish.AbstractCollectRender): time=get_formatted_current_time(), source=context.data["currentFile"], label=node.split("/")[1], - subset=subset_name, - asset=asset_name, + productName=product_name, + folderPath=folder_path, task=task_name, attachTo=False, setMembers=[node], @@ -151,6 +153,7 @@ class CollectFarmRender(publish.AbstractCollectRender): priority=50, name=node.split("/")[1], + productType="render.farm", family="render.farm", families=["render.farm"], farm=True, diff --git a/client/ayon_core/hosts/harmony/plugins/publish/collect_instances.py b/client/ayon_core/hosts/harmony/plugins/publish/collect_instances.py index 3eb689aff6..5aad7d4751 100644 --- a/client/ayon_core/hosts/harmony/plugins/publish/collect_instances.py +++ b/client/ayon_core/hosts/harmony/plugins/publish/collect_instances.py @@ -13,13 +13,13 @@ class CollectInstances(pyblish.api.ContextPlugin): a composite node and marked with a unique identifier. Identifier: - id (str): "pyblish.avalon.instance" + id (str): "ayon.create.instance" """ label = "Instances" order = pyblish.api.CollectorOrder hosts = ["harmony"] - families_mapping = { + product_type_mapping = { "render": ["review", "ftrack"], "harmony.template": [], "palette": ["palette", "ftrack"] @@ -49,8 +49,14 @@ class CollectInstances(pyblish.api.ContextPlugin): if "container" in data["id"]: continue - # skip render farm family as it is collected separately - if data["family"] == "renderFarm": + product_type = data.get("productType") + if product_type is None: + product_type = data["family"] + data["productType"] = product_type + data["family"] = product_type + + # skip render farm product type as it is collected separately + if product_type == "renderFarm": continue instance = context.create_instance(node.split("/")[-1]) @@ -59,11 +65,14 @@ class CollectInstances(pyblish.api.ContextPlugin): instance.data["publish"] = harmony.send( {"function": "node.getEnable", "args": [node]} )["result"] - instance.data["families"] = self.families_mapping[data["family"]] + + families = [product_type] + families.extend(self.product_type_mapping[product_type]) + instance.data["families"] = families # If set in plugin, pair the scene Version in ftrack with # thumbnails and review media. 
- if (self.pair_media and instance.data["family"] == "scene"): + if (self.pair_media and product_type == "scene"): context.data["scene_instance"] = instance # Produce diagnostic message for any graphical diff --git a/client/ayon_core/hosts/harmony/plugins/publish/collect_palettes.py b/client/ayon_core/hosts/harmony/plugins/publish/collect_palettes.py index 9343fab86d..9e0b500663 100644 --- a/client/ayon_core/hosts/harmony/plugins/publish/collect_palettes.py +++ b/client/ayon_core/hosts/harmony/plugins/publish/collect_palettes.py @@ -31,16 +31,18 @@ class CollectPalettes(pyblish.api.ContextPlugin): if (not any([re.search(pattern, task_name) for pattern in self.allowed_tasks])): return - asset_name = context.data["asset"] + folder_path = context.data["folderPath"] + product_type = "harmony.palette" for name, id in palettes.items(): instance = context.create_instance(name) instance.data.update({ "id": id, - "family": "harmony.palette", - 'families': [], - "asset": asset_name, - "subset": "{}{}".format("palette", name) + "productType": product_type, + "family": product_type, + "families": [product_type], + "folderPath": folder_path, + "productName": "{}{}".format("palette", name) }) self.log.info( "Created instance:\n" + json.dumps( diff --git a/client/ayon_core/hosts/harmony/plugins/publish/collect_workfile.py b/client/ayon_core/hosts/harmony/plugins/publish/collect_workfile.py index 4be2a0fc26..b1010cfb57 100644 --- a/client/ayon_core/hosts/harmony/plugins/publish/collect_workfile.py +++ b/client/ayon_core/hosts/harmony/plugins/publish/collect_workfile.py @@ -3,7 +3,7 @@ import os import pyblish.api -from ayon_core.pipeline.create import get_subset_name +from ayon_core.pipeline.create import get_product_name class CollectWorkfile(pyblish.api.ContextPlugin): @@ -15,26 +15,27 @@ class CollectWorkfile(pyblish.api.ContextPlugin): def process(self, context): """Plugin entry point.""" - family = "workfile" + product_type = "workfile" basename = os.path.basename(context.data["currentFile"]) - subset = get_subset_name( - family, - "", - context.data["anatomyData"]["task"]["name"], + product_name = get_product_name( + context.data["projectName"], context.data["assetEntity"], - context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"], + context.data["task"], + context.data["hostName"], + product_type, + "", project_settings=context.data["project_settings"] ) # Create instance - instance = context.create_instance(subset) + instance = context.create_instance(product_name) instance.data.update({ - "subset": subset, + "productName": product_name, "label": basename, "name": basename, - "family": family, - "families": [family], + "productType": product_type, + "family": product_type, + "families": [product_type], "representations": [], - "asset": context.data["asset"] + "folderPath": context.data["folderPath"] }) diff --git a/client/ayon_core/hosts/harmony/plugins/publish/extract_template.py b/client/ayon_core/hosts/harmony/plugins/publish/extract_template.py index c481a34454..b2c7fa8174 100644 --- a/client/ayon_core/hosts/harmony/plugins/publish/extract_template.py +++ b/client/ayon_core/hosts/harmony/plugins/publish/extract_template.py @@ -75,7 +75,9 @@ class ExtractTemplate(publish.Extractor): instance.data["representations"] = [representation] instance.data["version_name"] = "{}_{}".format( - instance.data["subset"], instance.context.data["task"]) + instance.data["productName"], + instance.context.data["task"] + ) def get_backdrops(self, node: str) -> list: """Get backdrops 
for the node. diff --git a/client/ayon_core/hosts/harmony/plugins/publish/help/validate_instances.xml b/client/ayon_core/hosts/harmony/plugins/publish/help/validate_instances.xml index 3b040e8ea8..67ad7e2d21 100644 --- a/client/ayon_core/hosts/harmony/plugins/publish/help/validate_instances.xml +++ b/client/ayon_core/hosts/harmony/plugins/publish/help/validate_instances.xml @@ -3,9 +3,9 @@ Subset context -## Invalid subset context +## Invalid product context -Asset name found '{found}' in subsets, expected '{expected}'. +Asset name found '{found}' in products, expected '{expected}'. ### How to repair? @@ -19,7 +19,7 @@ If this is unwanted, close workfile and open again, that way different asset val ### __Detailed Info__ (optional) This might happen if you are reuse old workfile and open it in different context. -(Eg. you created subset "renderCompositingDefault" from asset "Robot' in "your_project_Robot_compositing.aep", now you opened this workfile in a context "Sloth" but existing subset for "Robot" asset stayed in the workfile.) +(E.g. you created product "renderCompositingDefault" from asset "Robot" in "your_project_Robot_compositing.aep", now you opened this workfile in a context "Sloth" but the existing product for the "Robot" asset stayed in the workfile.) \ No newline at end of file diff --git a/client/ayon_core/hosts/harmony/plugins/publish/validate_instances.py b/client/ayon_core/hosts/harmony/plugins/publish/validate_instances.py index a57a863d6f..fdba834de6 100644 --- a/client/ayon_core/hosts/harmony/plugins/publish/validate_instances.py +++ b/client/ayon_core/hosts/harmony/plugins/publish/validate_instances.py @@ -27,9 +27,10 @@ class ValidateInstanceRepair(pyblish.api.Action): # Apply pyblish.logic to get the instances for the plug-in instances = pyblish.api.instances_by_plugin(failed, plugin) + folder_path = get_current_asset_name() for instance in instances: data = harmony.read(instance.data["setMembers"][0]) - data["asset"] = get_current_asset_name() + data["folderPath"] = folder_path harmony.imprint(instance.data["setMembers"][0], data) @@ -42,7 +43,7 @@ class ValidateInstance(pyblish.api.InstancePlugin): order = ValidateContentsOrder def process(self, instance): - instance_asset = instance.data["asset"] + instance_asset = instance.data["folderPath"] current_asset = get_current_asset_name() msg = ( "Instance asset is not the same as current asset:" diff --git a/client/ayon_core/hosts/harmony/plugins/publish/validate_scene_settings.py b/client/ayon_core/hosts/harmony/plugins/publish/validate_scene_settings.py index 0cf96e70b0..6d46fbcd33 100644 --- a/client/ayon_core/hosts/harmony/plugins/publish/validate_scene_settings.py +++ b/client/ayon_core/hosts/harmony/plugins/publish/validate_scene_settings.py @@ -77,7 +77,7 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin): expected_settings.pop("resolutionWidth") expected_settings.pop("resolutionHeight") - if (any(re.search(pattern, os.getenv('AVALON_TASK')) + if (any(re.search(pattern, os.getenv('AYON_TASK_NAME')) for pattern in self.skip_timelines_check)): self.log.info("Skipping frames check because of " "task name and pattern {}".format( diff --git a/client/ayon_core/hosts/hiero/addon.py b/client/ayon_core/hosts/hiero/addon.py index 447700e2e1..f612493ca1 100644 --- a/client/ayon_core/hosts/hiero/addon.py +++ b/client/ayon_core/hosts/hiero/addon.py @@ -1,17 +1,14 @@ import os import platform -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon HIERO_ROOT_DIR =
os.path.dirname(os.path.abspath(__file__)) -class HieroAddon(OpenPypeModule, IHostAddon): +class HieroAddon(AYONAddon, IHostAddon): name = "hiero" host_name = "hiero" - def initialize(self, module_settings): - self.enabled = True - def add_implementation_envs(self, env, _app): # Add requirements to HIERO_PLUGIN_PATH new_hiero_paths = [ diff --git a/client/ayon_core/hosts/hiero/api/lib.py b/client/ayon_core/hosts/hiero/api/lib.py index 24ff76d30b..ef0f9edca9 100644 --- a/client/ayon_core/hosts/hiero/api/lib.py +++ b/client/ayon_core/hosts/hiero/api/lib.py @@ -22,7 +22,12 @@ except ImportError: from ayon_core.client import get_project from ayon_core.settings import get_project_settings -from ayon_core.pipeline import Anatomy, get_current_project_name +from ayon_core.pipeline import ( + Anatomy, + get_current_project_name, + AYON_INSTANCE_ID, + AVALON_INSTANCE_ID, +) from ayon_core.pipeline.load import filter_containers from ayon_core.lib import Logger from . import tags @@ -584,8 +589,8 @@ def imprint(track_item, data=None): Examples: data = { 'asset': 'sq020sh0280', - 'family': 'render', - 'subset': 'subsetMain' + 'productType': 'render', + 'productName': 'productMain' } """ data = data or {} @@ -1217,7 +1222,9 @@ def sync_clip_name_to_data_asset(track_items_list): # ignore if no data on the clip or not publish instance if not data: continue - if data.get("id") != "pyblish.avalon.instance": + if data.get("id") not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: continue # fix data if wrong name diff --git a/client/ayon_core/hosts/hiero/api/pipeline.py b/client/ayon_core/hosts/hiero/api/pipeline.py index 1897628678..a9ba2e4df3 100644 --- a/client/ayon_core/hosts/hiero/api/pipeline.py +++ b/client/ayon_core/hosts/hiero/api/pipeline.py @@ -15,6 +15,7 @@ from ayon_core.pipeline import ( deregister_creator_plugin_path, deregister_loader_plugin_path, AVALON_CONTAINER_ID, + AYON_CONTAINER_ID, ) from ayon_core.tools.utils import host_tools from . 
import lib, menu, events @@ -158,7 +159,9 @@ def parse_container(item, validate=True): def data_to_container(item, data): if ( not data - or data.get("id") != "pyblish.avalon.container" + or data.get("id") not in { + AYON_CONTAINER_ID, AVALON_CONTAINER_ID + } ): return diff --git a/client/ayon_core/hosts/hiero/api/plugin.py b/client/ayon_core/hosts/hiero/api/plugin.py index 574865488f..51601a7ee0 100644 --- a/client/ayon_core/hosts/hiero/api/plugin.py +++ b/client/ayon_core/hosts/hiero/api/plugin.py @@ -363,7 +363,7 @@ class SequenceLoader(LoaderPlugin): ): pass - def update(self, container, representation): + def update(self, container, context): """Update an existing `container` """ pass @@ -439,7 +439,7 @@ class ClipLoader: """ Gets context and convert it to self.data data structure: { - "name": "assetName_subsetName_representationName" + "name": "assetName_productName_representationName" "path": "path/to/file/created/by/get_repr..", "binPath": "projectBinPath", } @@ -448,10 +448,10 @@ class ClipLoader: repr = self.context["representation"] repr_cntx = repr["context"] asset = str(repr_cntx["asset"]) - subset = str(repr_cntx["subset"]) + product_name = str(repr_cntx["subset"]) representation = str(repr_cntx["representation"]) self.data["clip_name"] = self.clip_name_template.format(**repr_cntx) - self.data["track_name"] = "_".join([subset, representation]) + self.data["track_name"] = "_".join([product_name, representation]) self.data["versionData"] = self.context["version"]["data"] # gets file path file = get_representation_path_from_context(self.context) @@ -659,9 +659,9 @@ class PublishClip: rename_default = False hierarchy_default = "{_folder_}/{_sequence_}/{_track_}" clip_name_default = "shot_{_trackIndex_:0>3}_{_clipIndex_:0>4}" - subset_name_default = "" + base_product_name_default = "" review_track_default = "< none >" - subset_family_default = "plate" + product_type_default = "plate" count_from_default = 10 count_steps_default = 10 vertical_sync_default = False @@ -785,10 +785,10 @@ class PublishClip: "countFrom", {}).get("value") or self.count_from_default self.count_steps = self.ui_inputs.get( "countSteps", {}).get("value") or self.count_steps_default - self.subset_name = self.ui_inputs.get( - "subsetName", {}).get("value") or self.subset_name_default - self.subset_family = self.ui_inputs.get( - "subsetFamily", {}).get("value") or self.subset_family_default + self.base_product_name = self.ui_inputs.get( + "productName", {}).get("value") or self.base_product_name_default + self.product_type = self.ui_inputs.get( + "productType", {}).get("value") or self.product_type_default self.vertical_sync = self.ui_inputs.get( "vSyncOn", {}).get("value") or self.vertical_sync_default self.driving_layer = self.ui_inputs.get( @@ -798,12 +798,14 @@ class PublishClip: self.audio = self.ui_inputs.get( "audio", {}).get("value") or False - # build subset name from layer name - if self.subset_name == "": - self.subset_name = self.track_name + # build product name from layer name + if self.base_product_name == "": + self.base_product_name = self.track_name - # create subset for publishing - self.subset = self.subset_family + self.subset_name.capitalize() + # create product for publishing + self.product_name = ( + self.product_type + self.base_product_name.capitalize() + ) def _replace_hash_to_expression(self, name, text): """ Replace hash with number in correct padding. 
""" @@ -885,14 +887,14 @@ class PublishClip: for (_in, _out), hero_data in self.vertical_clip_match.items(): hero_data.update({"heroTrack": False}) if _in == self.clip_in and _out == self.clip_out: - data_subset = hero_data["subset"] + data_product_name = hero_data["productName"] # add track index in case duplicity of names in hero data - if self.subset in data_subset: - hero_data["subset"] = self.subset + str( + if self.product_name in data_product_name: + hero_data["productName"] = self.product_name + str( self.track_index) - # in case track name and subset name is the same then add - if self.subset_name == self.track_name: - hero_data["subset"] = self.subset + # in case track name and product name is the same then add + if self.base_product_name == self.track_name: + hero_data["productName"] = self.product_name # assign data to return hierarchy data to tag tag_hierarchy_data = hero_data @@ -913,9 +915,9 @@ class PublishClip: "hierarchy": hierarchy_filled, "parents": self.parents, "hierarchyData": hierarchy_formatting_data, - "subset": self.subset, - "family": self.subset_family, - "families": [self.data["family"]] + "productName": self.product_name, + "productType": self.product_type, + "families": [self.product_type, self.data["family"]] } def _convert_to_entity(self, type, template): diff --git a/client/ayon_core/hosts/hiero/api/tags.py b/client/ayon_core/hosts/hiero/api/tags.py index ad7c7e44a1..6491b1f384 100644 --- a/client/ayon_core/hosts/hiero/api/tags.py +++ b/client/ayon_core/hosts/hiero/api/tags.py @@ -28,8 +28,8 @@ def tag_data(): # "note": "Collecting track items to Nuke scripts.", # "icon": "icons:TagNuke.png", # "metadata": { - # "family": "nukescript", - # "subset": "main" + # "productType": "nukescript", + # "productName": "main" # } # }, "Comment": { @@ -37,17 +37,17 @@ def tag_data(): "note": "Comment on a shot.", "icon": "icons:TagComment.png", "metadata": { - "family": "comment", - "subset": "main" + "productType": "comment", + "productName": "main" } }, "FrameMain": { "editable": "1", - "note": "Publishing a frame subset.", + "note": "Publishing a frame product.", "icon": "z_layer_main.png", "metadata": { - "family": "frame", - "subset": "main", + "productType": "frame", + "productName": "main", "format": "png" } } @@ -153,7 +153,7 @@ def add_tags_to_workfile(): "note": task_type, "icon": "icons:TagGood.png", "metadata": { - "family": "task", + "productType": "task", "type": task_type } } @@ -173,7 +173,7 @@ def add_tags_to_workfile(): "path": "icons:TagActor.png" }, "metadata": { - "family": "assetbuild" + "productType": "assetbuild" } } diff --git a/client/ayon_core/hosts/hiero/api/workio.py b/client/ayon_core/hosts/hiero/api/workio.py index 14d9439344..4c2416ca38 100644 --- a/client/ayon_core/hosts/hiero/api/workio.py +++ b/client/ayon_core/hosts/hiero/api/workio.py @@ -70,4 +70,4 @@ def current_file(): def work_root(session): - return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/") + return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/") diff --git a/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py b/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py index ce84a9120e..90ea9ef50f 100644 --- a/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py +++ b/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py @@ -10,7 +10,7 @@ class CreateShotClip(phiero.Creator): """Publishable clip""" label = "Create Publishable Clip" - family = "clip" + product_type = "clip" icon = "film" defaults = 
["Main"] @@ -133,19 +133,19 @@ class CreateShotClip(phiero.Creator): "target": "ui", "order": 3, "value": { - "subsetName": { + "productName": { "value": ["", "main", "bg", "fg", "bg", "animatic"], "type": "QComboBox", - "label": "Subset Name", + "label": "pRODUCT Name", "target": "ui", - "toolTip": "chose subset name pattern, if is selected, name of track layer will be used", # noqa + "toolTip": "chose product name pattern, if is selected, name of track layer will be used", # noqa "order": 0}, - "subsetFamily": { + "productType": { "value": ["plate", "take"], "type": "QComboBox", - "label": "Subset Family", - "target": "ui", "toolTip": "What use of this subset is for", # noqa + "label": "Product Type", + "target": "ui", "toolTip": "What use of this product is for", # noqa "order": 1}, "reviewTrack": { "value": ["< none >"] + gui_tracks, @@ -159,7 +159,7 @@ class CreateShotClip(phiero.Creator): "type": "QCheckBox", "label": "Include audio", "target": "tag", - "toolTip": "Process subsets with corresponding audio", # noqa + "toolTip": "Process productS with corresponding audio", # noqa "order": 3}, "sourceResolution": { "value": False, diff --git a/client/ayon_core/hosts/hiero/plugins/load/load_clip.py b/client/ayon_core/hosts/hiero/plugins/load/load_clip.py index d77a28872f..e5ef977c42 100644 --- a/client/ayon_core/hosts/hiero/plugins/load/load_clip.py +++ b/client/ayon_core/hosts/hiero/plugins/load/load_clip.py @@ -14,7 +14,7 @@ import ayon_core.hosts.hiero.api as phiero class LoadClip(phiero.SequenceLoader): - """Load a subset to timeline as clip + """Load a product to timeline as clip Place clip to timeline on its asset origin timings collected during conforming to project @@ -42,7 +42,7 @@ class LoadClip(phiero.SequenceLoader): clip_name_template = "{asset}_{subset}_{representation}" @classmethod - def apply_settings(cls, project_settings, system_settings): + def apply_settings(cls, project_settings): plugin_type_settings = ( project_settings .get("hiero", {}) @@ -54,25 +54,36 @@ class LoadClip(phiero.SequenceLoader): plugin_name = cls.__name__ - plugin_settings = None # Look for plugin settings in host specific settings - if plugin_name in plugin_type_settings: - plugin_settings = plugin_type_settings[plugin_name] - + plugin_settings = plugin_type_settings.get(plugin_name) if not plugin_settings: return print(">>> We have preset for {}".format(plugin_name)) for option, value in plugin_settings.items(): + if option == "representations": + continue + + if option == "product_types": + # TODO remove the key conversion when loaders can filter by + # product types + # convert 'product_types' to 'families' + option = "families" + + elif option == "clip_name_template": + # TODO remove the formatting replacement + value = ( + value + .replace("{folder[name]}", "{asset}") + .replace("{product[name]}", "{subset}") + ) + if option == "enabled" and value is False: print(" - is disabled by preset") - elif option == "representations": - continue else: print(" - setting `{}`: `{}`".format(option, value)) setattr(cls, option, value) - def load(self, context, name, namespace, options): # add clip name template to options options.update({ @@ -135,27 +146,25 @@ class LoadClip(phiero.SequenceLoader): self.__class__.__name__, data_imprint) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """ Updating previously 
loaded clips """ - + version_doc = context["version"] + repre_doc = context["representation"] # load clip to timeline and get main variables name = container['name'] namespace = container['namespace'] track_item = phiero.get_track_items( track_item_name=namespace).pop() - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) - version_data = version_doc.get("data", {}) version_name = version_doc.get("name", None) colorspace = version_data.get("colorspace", None) object_name = "{}_{}".format(name, namespace) - file = get_representation_path(representation).replace("\\", "/") + file = get_representation_path(repre_doc).replace("\\", "/") clip = track_item.source() # reconnect media to new path @@ -180,7 +189,7 @@ class LoadClip(phiero.SequenceLoader): # add variables related to version context data_imprint.update({ - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), "version": version_name, "colorspace": colorspace, "objectName": object_name diff --git a/client/ayon_core/hosts/hiero/plugins/load/load_effects.py b/client/ayon_core/hosts/hiero/plugins/load/load_effects.py index 809080e87e..9a5e659451 100644 --- a/client/ayon_core/hosts/hiero/plugins/load/load_effects.py +++ b/client/ayon_core/hosts/hiero/plugins/load/load_effects.py @@ -157,19 +157,19 @@ class LoadEffects(load.LoaderPlugin): return loaded - def update(self, container, representation): + def update(self, container, context): """ Updating previously loaded effects """ + version_doc = context["version"] + repre_doc = context["representation"] active_track = container["_item"] - file = get_representation_path(representation).replace("\\", "/") + file = get_representation_path(repre_doc).replace("\\", "/") # get main variables name = container['name'] namespace = container['namespace'] # get timeline in out data - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) version_data = version_doc["data"] clip_in = version_data["clipIn"] clip_out = version_data["clipOut"] @@ -197,7 +197,7 @@ class LoadEffects(load.LoaderPlugin): data_imprint = { "objectName": object_name, "name": name, - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), "children_names": [] } @@ -256,8 +256,8 @@ class LoadEffects(load.LoaderPlugin): else: return input - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): pass diff --git a/client/ayon_core/hosts/hiero/plugins/publish/collect_clip_effects.py b/client/ayon_core/hosts/hiero/plugins/publish/collect_clip_effects.py index d7f646ebc9..32b4864022 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/collect_clip_effects.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/collect_clip_effects.py @@ -1,3 +1,4 @@ +from itertools import product import re import pyblish.api @@ -12,13 +13,13 @@ class CollectClipEffects(pyblish.api.InstancePlugin): effect_categories = [] def process(self, instance): - family = "effect" + product_type = "effect" effects = {} review = instance.data.get("review") review_track_index = instance.context.data.get("reviewTrackIndex") item = instance.data["item"] - if "audio" in instance.data["family"]: + if "audio" in instance.data["productType"]: return # frame range @@ -61,16 +62,16 @@ class CollectClipEffects(pyblish.api.InstancePlugin): if not 
effects: return - subset = instance.data.get("subset") - effects.update({"assignTo": subset}) + product_name = instance.data.get("productName") + effects.update({"assignTo": product_name}) - subset_split = re.findall(r'[A-Z][^A-Z]*', subset) + product_name_split = re.findall(r'[A-Z][^A-Z]*', product_name) - if len(subset_split) > 0: - root_name = subset.replace(subset_split[0], "") - subset_split.insert(0, root_name.capitalize()) + if len(product_name_split) > 0: + root_name = product_name.replace(product_name_split[0], "") + product_name_split.insert(0, root_name.capitalize()) - subset_split.insert(0, "effect") + product_name_split.insert(0, "effect") effect_categories = { x["name"]: x["effect_classes"] for x in self.effect_categories @@ -104,8 +105,8 @@ class CollectClipEffects(pyblish.api.InstancePlugin): effects_categorized[category]["assignTo"] = effects["assignTo"] for category, effects in effects_categorized.items(): - name = "".join(subset_split) - name += category.capitalize() + product_name = "".join(product_name_split) + product_name += category.capitalize() # create new instance and inherit data data = {} @@ -114,15 +115,17 @@ class CollectClipEffects(pyblish.api.InstancePlugin): continue data[key] = value - # change names - data["subset"] = name - data["family"] = family - data["families"] = [family] - data["name"] = data["subset"] + "_" + data["asset"] - data["label"] = "{} - {}".format( - data['asset'], data["subset"] - ) - data["effects"] = effects + data.update({ + "productName": product_name, + "productType": product_type, + "family": product_type, + "families": [product_type], + "name": product_name + "_" + data["folderPath"], + "label": "{} - {}".format( + data["folderPath"], product_name + ), + "effects": effects, + }) # create new instance _instance = instance.context.create_instance(**data) diff --git a/client/ayon_core/hosts/hiero/plugins/publish/collect_frame_tag_instances.py b/client/ayon_core/hosts/hiero/plugins/publish/collect_frame_tag_instances.py index b981d89eef..d73b5d4667 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/collect_frame_tag_instances.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/collect_frame_tag_instances.py @@ -13,8 +13,8 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin): Tag is expected to have metadata: { - "family": "frame" - "subset": "main" + "productType": "frame" + "productName": "main" } """ @@ -26,14 +26,14 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin): self._context = context # collect all sequence tags - subset_data = self._create_frame_subset_data_sequence(context) + product_data = self._create_frame_product_data_sequence(context) - self.log.debug("__ subset_data: {}".format( - pformat(subset_data) + self.log.debug("__ product_data: {}".format( + pformat(product_data) )) # create instances - self._create_instances(subset_data) + self._create_instances(product_data) def _get_tag_data(self, tag): data = {} @@ -66,7 +66,7 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin): return data - def _create_frame_subset_data_sequence(self, context): + def _create_frame_product_data_sequence(self, context): sequence_tags = [] sequence = context.data["activeTimeline"] @@ -87,10 +87,13 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin): if not tag_data: continue - if "family" not in tag_data: + product_type = tag_data.get("productType") + if product_type is None: + product_type = tag_data.get("family") + if not product_type: continue - if tag_data["family"] != "frame": + if 
product_type != "frame": continue sequence_tags.append(tag_data) @@ -99,10 +102,10 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin): pformat(sequence_tags) )) - # first collect all available subset tag frames - subset_data = {} + # first collect all available product tag frames + product_data = {} context_asset_doc = context.data["assetEntity"] - context_asset_name = get_asset_name_identifier(context_asset_doc) + context_folder_path = get_asset_name_identifier(context_asset_doc) for tag_data in sequence_tags: frame = int(tag_data["start"]) @@ -110,33 +113,37 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin): if frame not in publish_frames: continue - subset = tag_data["subset"] + product_name = tag_data.get("productName") + if product_name is None: + product_name = tag_data["subset"] - if subset in subset_data: - # update existing subset key - subset_data[subset]["frames"].append(frame) + if product_name in product_data: + # update existing product key + product_data[product_name]["frames"].append(frame) else: - # create new subset key - subset_data[subset] = { + # create new product key + product_data[product_name] = { "frames": [frame], "format": tag_data["format"], - "asset": context_asset_name + "folderPath": context_folder_path } - return subset_data + return product_data - def _create_instances(self, subset_data): - # create instance per subset - for subset_name, subset_data in subset_data.items(): - name = "frame" + subset_name.title() + def _create_instances(self, product_data): + # create instance per product + product_type = "image" + for product_name, product_data in product_data.items(): + name = "frame" + product_name.title() data = { "name": name, - "label": "{} {}".format(name, subset_data["frames"]), - "family": "image", - "families": ["frame"], - "asset": subset_data["asset"], - "subset": name, - "format": subset_data["format"], - "frames": subset_data["frames"] + "label": "{} {}".format(name, product_data["frames"]), + "productType": product_type, + "family": product_type, + "families": [product_type, "frame"], + "folderPath": product_data["folderPath"], + "productName": name, + "format": product_data["format"], + "frames": product_data["frames"] } self._context.create_instance(**data) diff --git a/client/ayon_core/hosts/hiero/plugins/publish/collect_tag_tasks.py b/client/ayon_core/hosts/hiero/plugins/publish/collect_tag_tasks.py index 27968060e1..35a8c02cc0 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/collect_tag_tasks.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/collect_tag_tasks.py @@ -16,10 +16,12 @@ class CollectClipTagTasks(api.InstancePlugin): tasks = {} for tag in tags: t_metadata = dict(tag.metadata()) - t_family = t_metadata.get("tag.family", "") + t_product_type = t_metadata.get("tag.productType") + if t_product_type is None: + t_product_type = t_metadata.get("tag.family", "") - # gets only task family tags and collect labels - if "task" in t_family: + # gets only task product type tags and collect labels + if "task" in t_product_type: t_task_name = t_metadata.get("tag.label", "") t_task_type = t_metadata.get("tag.type", "") tasks[t_task_name] = {"type": t_task_type} diff --git a/client/ayon_core/hosts/hiero/plugins/publish/extract_clip_effects.py b/client/ayon_core/hosts/hiero/plugins/publish/extract_clip_effects.py index afff41fc74..25b52968fa 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/extract_clip_effects.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/extract_clip_effects.py @@ -21,8 +21,8 @@ 
class ExtractClipEffects(publish.Extractor): if not effects: return - subset = instance.data.get("subset") - family = instance.data["family"] + product_name = instance.data.get("productName") + product_type = instance.data["productType"] self.log.debug("creating staging dir") staging_dir = self.staging_dir(instance) @@ -32,7 +32,7 @@ class ExtractClipEffects(publish.Extractor): instance.data["transfers"] = list() ext = "json" - file = subset + "." + ext + file = product_name + "." + ext # when instance is created during collection part resources_dir = instance.data["resourcesDir"] @@ -57,7 +57,7 @@ class ExtractClipEffects(publish.Extractor): "sourceStart", "sourceStartH", "sourceEnd", "sourceEndH", "frameStart", "frameEnd", "clipIn", "clipOut", "clipInH", "clipOutH", - "asset", "version" + "folderPath", "version" ] # pass data to version @@ -68,8 +68,10 @@ class ExtractClipEffects(publish.Extractor): version_data.update({ "colorspace": item.sourceMediaColourTransform(), "colorspaceScript": instance.context.data["colorspace"], - "families": [family, "plate"], - "subset": subset, + "families": [product_type, "plate"], + # TODO find out if 'subset' is needed (and 'productName') + "subset": product_name, + "productName": product_name, "fps": instance.context.data["fps"] }) instance.data["versionData"] = version_data @@ -77,7 +79,7 @@ class ExtractClipEffects(publish.Extractor): representation = { 'files': file, 'stagingDir': staging_dir, - 'name': family + ext.title(), + 'name': product_type + ext.title(), 'ext': ext } instance.data["representations"].append(representation) diff --git a/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py b/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py index e41ca74320..911b96c280 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py @@ -1,5 +1,6 @@ import pyblish +from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID from ayon_core.pipeline.editorial import is_overlapping_otio_ranges from ayon_core.hosts.hiero import api as phiero @@ -56,7 +57,9 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if not tag_data: continue - if tag_data.get("id") != "pyblish.avalon.instance": + if tag_data.get("id") not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: continue # get clips subtracks and anotations @@ -84,22 +87,47 @@ class PrecollectInstances(pyblish.api.ContextPlugin): asset, asset_name = self._get_asset_data(tag_data) - subset = tag_data["subset"] + product_name = tag_data.get("productName") + if product_name is None: + product_name = tag_data["subset"] - # insert family into families families = [str(f) for f in tag_data["families"]] # form label label = "{} -".format(asset) if asset_name != clip_name: label += " ({})".format(clip_name) - label += " {}".format(subset) + label += " {}".format(product_name) + + # TODO: remove backward compatibility + product_name = tag_data.get("productName") + if product_name is None: + # backward compatibility: subset -> productName + product_name = tag_data.get("subset") + + # backward compatibility: product_name should not be missing + if not product_name: + self.log.error( + "Product name is not defined for: {}".format(asset)) + + # TODO: remove backward compatibility + product_type = tag_data.get("productType") + if product_type is None: + # backward compatibility: family -> productType + product_type = tag_data.get("family") + + # backward compatibility: 
product_type should not be missing + if not product_type: + self.log.error( + "Product type is not defined for: {}".format(asset)) data.update({ - "name": "{}_{}".format(asset, subset), + "name": "{}_{}".format(asset, product_name), "label": label, - "asset": asset, + "folderPath": asset, "asset_name": asset_name, + "productName": product_name, + "productType": product_type, "item": track_item, "families": families, "publish": tag_data["publish"], @@ -143,7 +171,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if not with_audio: continue - # create audio subset instance + # create audio product instance self.create_audio_instance(context, **data) # add audioReview attribute to plate instance data @@ -177,7 +205,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): }) def create_shot_instance(self, context, **data): - subset = "shotMain" + product_name = "shotMain" master_layer = data.get("heroTrack") hierarchy_data = data.get("hierarchyData") item = data.get("item") @@ -189,24 +217,24 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if not hierarchy_data: return - asset = data["asset"] + asset = data["folderPath"] asset_name = data["asset_name"] - # insert family into families - family = "shot" + product_type = "shot" # form label label = "{} -".format(asset) if asset_name != clip_name: label += " ({}) ".format(clip_name) - label += " {}".format(subset) + label += " {}".format(product_name) data.update({ - "name": "{}_{}".format(asset, subset), + "name": "{}_{}".format(asset, product_name), "label": label, - "subset": subset, - "family": family, - "families": [] + "productName": product_name, + "productType": product_type, + "family": product_type, + "families": [product_type] }) instance = context.create_instance(**data) @@ -235,13 +263,12 @@ class PrecollectInstances(pyblish.api.ContextPlugin): return folder_path, asset_name def create_audio_instance(self, context, **data): - subset = "audioMain" + product_name = "audioMain" master_layer = data.get("heroTrack") if not master_layer: return - asset = data.get("asset") item = data.get("item") clip_name = item.name() @@ -249,24 +276,24 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if not self.test_any_audio(item): return - asset = data["asset"] + asset = data["folderPath"] asset_name = data["asset_name"] - # insert family into families - family = "audio" + product_type = "audio" # form label label = "{} -".format(asset) if asset_name != clip_name: label += " ({}) ".format(clip_name) - label += " {}".format(subset) + label += " {}".format(product_name) data.update({ - "name": "{}_{}".format(asset, subset), + "name": "{}_{}".format(asset, product_name), "label": label, - "subset": subset, - "family": family, - "families": ["clip"] + "productName": product_name, + "productType": product_type, + "family": product_type, + "families": [product_type, "clip"] }) # remove review track attr if any data.pop("reviewTrack") diff --git a/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py b/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py index e9e2aae653..8df6cd4261 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py @@ -17,7 +17,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.491 def process(self, context): - asset = context.data["asset"] + asset = context.data["folderPath"] asset_name = asset.split("/")[-1] active_timeline = 
hiero.ui.activeSequence()
@@ -59,17 +59,20 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
             'files': base_name,
             "stagingDir": staging_dir,
         }
 
-        family = "workfile"
+        product_type = "workfile"
         instance_data = {
             "label": "{} - {}Main".format(
-                asset, family),
-            "name": "{}_{}".format(asset_name, family),
-            "asset": context.data["asset"],
-            # TODO use 'get_subset_name'
-            "subset": "{}{}Main".format(asset_name, family.capitalize()),
+                asset, product_type),
+            "name": "{}_{}".format(asset_name, product_type),
+            "folderPath": context.data["folderPath"],
+            # TODO use 'get_product_name'
+            "productName": "{}{}Main".format(
+                asset_name, product_type.capitalize()
+            ),
             "item": project,
-            "family": family,
-            "families": [],
+            "productType": product_type,
+            "family": product_type,
+            "families": [product_type],
             "representations": [workfile_representation, thumb_representation]
         }
diff --git a/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py b/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py
index ca937f4aa2..96d471115a 100644
--- a/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py
+++ b/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py
@@ -8,7 +8,7 @@ class CollectAssetBuilds(api.ContextPlugin):
 
     Tag is expected to have name of the asset and metadata:
         {
-            "family": "assetbuild"
+            "productType": "assetbuild"
         }
     """
@@ -29,7 +29,7 @@ class CollectAssetBuilds(api.ContextPlugin):
             asset_builds[asset_name] = asset_doc
 
         for instance in context:
-            if instance.data["family"] != "clip":
+            if instance.data["productType"] != "clip":
                 continue
 
             # Exclude non-tagged instances.
@@ -38,9 +38,11 @@ class CollectAssetBuilds(api.ContextPlugin):
 
             for tag in instance.data["tags"]:
                 t_metadata = dict(tag.metadata())
-                t_family = t_metadata.get("tag.family", "")
+                t_product_type = t_metadata.get("tag.productType")
+                if t_product_type is None:
+                    t_product_type = t_metadata.get("tag.family", "")
 
-                if t_family.lower() == "assetbuild":
+                if t_product_type.lower() == "assetbuild":
                     asset_names.append(tag["name"])
                     tagged = True
 
diff --git a/client/ayon_core/hosts/hiero/vendor/google/protobuf/descriptor.py b/client/ayon_core/hosts/hiero/vendor/google/protobuf/descriptor.py
index ad70be9a11..5d16ae2a0c 100644
--- a/client/ayon_core/hosts/hiero/vendor/google/protobuf/descriptor.py
+++ b/client/ayon_core/hosts/hiero/vendor/google/protobuf/descriptor.py
@@ -990,7 +990,7 @@ class FileDescriptor(DescriptorBase):
       :class:`descriptor_pb2.FileDescriptorProto`.
     dependencies (list[FileDescriptor]): List of other
       :class:`FileDescriptor` objects this :class:`FileDescriptor` depends on.
-    public_dependencies (list[FileDescriptor]): A subset of
+    public_dependencies (list[FileDescriptor]): A product of
       :attr:`dependencies`, which were declared as "public".
     message_types_by_name (dict(str, Descriptor)): Mapping from message
       names to their :class:`Descriptor`.
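The Hiero publish plugins above all follow the same backward-compatibility convention while the data model migrates from subset/family to productName/productType: the new AYON key is read first and the legacy OpenPype key is used only as a fallback when the new one is missing. A minimal standalone sketch of that lookup pattern, shown here purely for illustration (the helper name is hypothetical and not part of this changeset):

def get_product_info(data):
    """Resolve product name and type from tag/instance data with legacy fallbacks."""
    # Prefer the new AYON keys.
    product_name = data.get("productName")
    if product_name is None:
        # Backward compatibility: "subset" -> "productName".
        product_name = data.get("subset")

    product_type = data.get("productType")
    if product_type is None:
        # Backward compatibility: "family" -> "productType".
        product_type = data.get("family")

    return product_name, product_type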
diff --git a/client/ayon_core/hosts/houdini/addon.py b/client/ayon_core/hosts/houdini/addon.py index 34d140db3c..95d714aea1 100644 --- a/client/ayon_core/hosts/houdini/addon.py +++ b/client/ayon_core/hosts/houdini/addon.py @@ -1,16 +1,13 @@ import os -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon HOUDINI_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) -class HoudiniAddon(OpenPypeModule, IHostAddon): +class HoudiniAddon(AYONAddon, IHostAddon): name = "houdini" host_name = "houdini" - def initialize(self, module_settings): - self.enabled = True - def add_implementation_envs(self, env, _app): # Add requirements to HOUDINI_PATH and HOUDINI_MENU_PATH startup_path = os.path.join(HOUDINI_HOST_DIR, "startup") diff --git a/client/ayon_core/hosts/houdini/api/creator_node_shelves.py b/client/ayon_core/hosts/houdini/api/creator_node_shelves.py index 567bb245db..57fdef753a 100644 --- a/client/ayon_core/hosts/houdini/api/creator_node_shelves.py +++ b/client/ayon_core/hosts/houdini/api/creator_node_shelves.py @@ -90,21 +90,21 @@ def create_interactive(creator_identifier, **kwargs): pane = stateutils.activePane(kwargs) if isinstance(pane, hou.NetworkEditor): pwd = pane.pwd() - subset_name = creator.get_subset_name( - variant=variant, - task_name=context.get_current_task_name(), + product_name = creator.get_product_name( + project_name=context.get_current_project_name(), asset_doc=get_asset_by_name( project_name=context.get_current_project_name(), asset_name=context.get_current_asset_name() ), - project_name=context.get_current_project_name(), - host_name=context.host_name + task_name=context.get_current_task_name(), + variant=variant, + host_name=context.host_name, ) tool_fn = CATEGORY_GENERIC_TOOL.get(pwd.childTypeCategory()) if tool_fn is not None: out_null = tool_fn(kwargs, "null") - out_null.setName("OUT_{}".format(subset_name), unique_name=True) + out_null.setName("OUT_{}".format(product_name), unique_name=True) before = context.instances_by_id.copy() diff --git a/client/ayon_core/hosts/houdini/api/lib.py b/client/ayon_core/hosts/houdini/api/lib.py index 7163aebdec..9db055779d 100644 --- a/client/ayon_core/hosts/houdini/api/lib.py +++ b/client/ayon_core/hosts/houdini/api/lib.py @@ -6,6 +6,7 @@ import re import uuid import logging import json +from contextlib import contextmanager import six @@ -834,7 +835,7 @@ def get_current_context_template_data_with_asset_data(): context = get_current_context() project_name = context["project_name"] - asset_name = context["asset_name"] + asset_name = context["folder_path"] task_name = context["task_name"] host_name = get_current_host_name() diff --git a/client/ayon_core/hosts/houdini/api/pipeline.py b/client/ayon_core/hosts/houdini/api/pipeline.py index d93ea9acec..cbc94a2408 100644 --- a/client/ayon_core/hosts/houdini/api/pipeline.py +++ b/client/ayon_core/hosts/houdini/api/pipeline.py @@ -15,6 +15,7 @@ from ayon_core.pipeline import ( register_loader_plugin_path, register_inventory_action_path, AVALON_CONTAINER_ID, + AYON_CONTAINER_ID, ) from ayon_core.pipeline.load import any_outdated_containers from ayon_core.hosts.houdini import HOUDINI_HOST_DIR @@ -271,8 +272,11 @@ def parse_container(container): def ls(): containers = [] - for identifier in (AVALON_CONTAINER_ID, - "pyblish.mindbender.container"): + for identifier in ( + AYON_CONTAINER_ID, + AVALON_CONTAINER_ID, + "pyblish.mindbender.container" + ): containers += lib.lsattr("id", identifier) for container in sorted(containers, diff --git 
a/client/ayon_core/hosts/houdini/api/plugin.py b/client/ayon_core/hosts/houdini/api/plugin.py index e8f89bfbb4..13cf3c9949 100644 --- a/client/ayon_core/hosts/houdini/api/plugin.py +++ b/client/ayon_core/hosts/houdini/api/plugin.py @@ -11,7 +11,9 @@ from ayon_core.pipeline import ( CreatorError, LegacyCreator, Creator as NewCreator, - CreatedInstance + CreatedInstance, + AYON_INSTANCE_ID, + AVALON_INSTANCE_ID, ) from ayon_core.lib import BoolDef from .lib import imprint, read, lsattr, add_self_publish_button @@ -97,28 +99,28 @@ class Creator(LegacyCreator): class HoudiniCreatorBase(object): @staticmethod - def cache_subsets(shared_data): + def cache_instance_data(shared_data): """Cache instances for Creators to shared data. - Create `houdini_cached_subsets` key when needed in shared data and + Create `houdini_cached_instances` key when needed in shared data and fill it with all collected instances from the scene under its respective creator identifiers. - Create `houdini_cached_legacy_subsets` key for any legacy instances + Create `houdini_cached_legacy_instance` key for any legacy instances detected in the scene as instances per family. Args: Dict[str, Any]: Shared data. - Return: - Dict[str, Any]: Shared data dictionary. - """ - if shared_data.get("houdini_cached_subsets") is None: + if shared_data.get("houdini_cached_instances") is None: cache = dict() cache_legacy = dict() - for node in lsattr("id", "pyblish.avalon.instance"): + nodes = [] + for id_type in [AYON_INSTANCE_ID, AVALON_INSTANCE_ID]: + nodes.extend(lsattr("id", id_type)) + for node in nodes: creator_identifier_parm = node.parm("creator_identifier") if creator_identifier_parm: @@ -136,8 +138,8 @@ class HoudiniCreatorBase(object): family = family_parm.eval() cache_legacy.setdefault(family, []).append(node) - shared_data["houdini_cached_subsets"] = cache - shared_data["houdini_cached_legacy_subsets"] = cache_legacy + shared_data["houdini_cached_instances"] = cache + shared_data["houdini_cached_legacy_instance"] = cache_legacy return shared_data @@ -172,7 +174,7 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): settings_name = None add_publish_button = False - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): try: self.selected_nodes = [] @@ -187,15 +189,15 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): asset_name = instance_data["folderPath"] instance_node = self.create_instance_node( - asset_name, subset_name, "/out", node_type) + asset_name, product_name, "/out", node_type) self.customize_node_look(instance_node) instance_data["instance_node"] = instance_node.path() instance_data["instance_id"] = instance_node.path() instance = CreatedInstance( - self.family, - subset_name, + self.product_type, + product_name, instance_data, self) self._add_instance_to_context(instance) @@ -229,9 +231,9 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): def collect_instances(self): # cache instances if missing - self.cache_subsets(self.collection_shared_data) + self.cache_instance_data(self.collection_shared_data) for instance in self.collection_shared_data[ - "houdini_cached_subsets"].get(self.identifier, []): + "houdini_cached_instances"].get(self.identifier, []): node_data = read(instance) @@ -241,6 +243,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): node_path = instance.path() node_data["instance_id"] = node_path node_data["instance_node"] = node_path + if "AYON_productName" in node_data: + node_data["productName"] = 
node_data.pop("AYON_productName") created_instance = CreatedInstance.from_existing( node_data, self @@ -264,6 +268,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): def imprint(self, node, values, update=False): # Never store instance node and instance id since that data comes # from the node's path + if "productName" in values: + values["AYON_productName"] = values.pop("productName") values.pop("instance_node", None) values.pop("instance_id", None) imprint(node, values, update=update) diff --git a/client/ayon_core/hosts/houdini/api/usd.py b/client/ayon_core/hosts/houdini/api/usd.py index e900bc5fac..e9c02a0307 100644 --- a/client/ayon_core/hosts/houdini/api/usd.py +++ b/client/ayon_core/hosts/houdini/api/usd.py @@ -7,7 +7,7 @@ from qtpy import QtWidgets, QtCore, QtGui from ayon_core import style from ayon_core.client import get_asset_by_name -from ayon_core.pipeline import legacy_io, get_current_project_name +from ayon_core.pipeline import get_current_project_name from ayon_core.tools.utils.assets_widget import SingleSelectAssetsWidget from pxr import Sdf @@ -27,7 +27,8 @@ class SelectAssetDialog(QtWidgets.QWidget): self.setWindowTitle("Pick Asset") self.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup) - assets_widget = SingleSelectAssetsWidget(legacy_io, parent=self) + assets_widget = SingleSelectAssetsWidget(self) + assets_widget.set_project_name(get_current_project_name(), False) layout = QtWidgets.QHBoxLayout(self) layout.addWidget(assets_widget) diff --git a/client/ayon_core/hosts/houdini/hooks/set_paths.py b/client/ayon_core/hosts/houdini/hooks/set_paths.py index 1f24a8dd7d..7eb346cc74 100644 --- a/client/ayon_core/hosts/houdini/hooks/set_paths.py +++ b/client/ayon_core/hosts/houdini/hooks/set_paths.py @@ -10,7 +10,7 @@ class SetPath(PreLaunchHook): launch_types = {LaunchTypes.local} def execute(self): - workdir = self.launch_context.env.get("AVALON_WORKDIR", "") + workdir = self.launch_context.env.get("AYON_WORKDIR", "") if not workdir: self.log.warning("BUG: Workdir is not filled.") return diff --git a/client/ayon_core/hosts/houdini/plugins/create/convert_legacy.py b/client/ayon_core/hosts/houdini/plugins/create/convert_legacy.py index 6e503fba6b..008187d9c8 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/convert_legacy.py +++ b/client/ayon_core/hosts/houdini/plugins/create/convert_legacy.py @@ -1,24 +1,24 @@ # -*- coding: utf-8 -*- -"""Converter for legacy Houdini subsets.""" +"""Converter for legacy Houdini products.""" from ayon_core.pipeline.create.creator_plugins import SubsetConvertorPlugin from ayon_core.hosts.houdini.api.lib import imprint class HoudiniLegacyConvertor(SubsetConvertorPlugin): - """Find and convert any legacy subsets in the scene. + """Find and convert any legacy products in the scene. - This Converter will find all legacy subsets in the scene and will - transform them to the current system. Since the old subsets doesn't + This Converter will find all legacy products in the scene and will + transform them to the current system. Since the old products doesn't retain any information about their original creators, the only mapping we can do is based on their families. - Its limitation is that you can have multiple creators creating subset - of the same family and there is no way to handle it. This code should - nevertheless cover all creators that came with OpenPype. + Its limitation is that you can have multiple creators creating product + name of the same product type and there is no way to handle it. 
This code + should nevertheless cover all creators that came with AYON. """ identifier = "io.openpype.creators.houdini.legacy" - family_to_id = { + product_type_to_id = { "camera": "io.openpype.creators.houdini.camera", "ass": "io.openpype.creators.houdini.ass", "imagesequence": "io.openpype.creators.houdini.imagesequence", @@ -33,44 +33,46 @@ class HoudiniLegacyConvertor(SubsetConvertorPlugin): def __init__(self, *args, **kwargs): super(HoudiniLegacyConvertor, self).__init__(*args, **kwargs) - self.legacy_subsets = {} + self.legacy_instances = {} def find_instances(self): - """Find legacy subsets in the scene. + """Find legacy products in the scene. - Legacy subsets are the ones that doesn't have `creator_identifier` + Legacy products are the ones that doesn't have `creator_identifier` parameter on them. This is using cached entries done in - :py:meth:`~HoudiniCreatorBase.cache_subsets()` + :py:meth:`~HoudiniCreatorBase.cache_instance_data()` """ - self.legacy_subsets = self.collection_shared_data.get( - "houdini_cached_legacy_subsets") - if not self.legacy_subsets: + self.legacy_instances = self.collection_shared_data.get( + "houdini_cached_legacy_instance") + if not self.legacy_instances: return - self.add_convertor_item("Found {} incompatible subset{}.".format( - len(self.legacy_subsets), "s" if len(self.legacy_subsets) > 1 else "") - ) + self.add_convertor_item("Found {} incompatible product{}.".format( + len(self.legacy_instances), + "s" if len(self.legacy_instances) > 1 else "" + )) def convert(self): - """Convert all legacy subsets to current. + """Convert all legacy products to current. It is enough to add `creator_identifier` and `instance_node`. """ - if not self.legacy_subsets: + if not self.legacy_instances: return - for family, subsets in self.legacy_subsets.items(): - if family in self.family_to_id: - for subset in subsets: + for product_type, legacy_instances in self.legacy_instances.items(): + if product_type in self.product_type_to_id: + for instance in legacy_instances: + creator_id = self.product_type_to_id[product_type] data = { - "creator_identifier": self.family_to_id[family], - "instance_node": subset.path() + "creator_identifier": creator_id, + "instance_node": instance.path() } - if family == "pointcache": + if product_type == "pointcache": data["families"] = ["abc"] self.log.info("Converting {} to {}".format( - subset.path(), self.family_to_id[family])) - imprint(subset, data) + instance.path(), creator_id)) + imprint(instance, data) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py b/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py index b6661fe7e4..b61b4cbd46 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -"""Creator plugin for creating alembic camera subsets.""" +"""Creator plugin for creating alembic camera products.""" from ayon_core.hosts.houdini.api import plugin from ayon_core.pipeline import CreatedInstance, CreatorError @@ -11,24 +11,24 @@ class CreateAlembicCamera(plugin.HoudiniCreator): identifier = "io.openpype.creators.houdini.camera" label = "Camera (Abc)" - family = "camera" + product_type = "camera" icon = "camera" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): import hou instance_data.pop("active", None) instance_data.update({"node_type": "alembic"}) 
instance = super(CreateAlembicCamera, self).create( - subset_name, + product_name, instance_data, pre_create_data) # type: CreatedInstance instance_node = hou.node(instance.get("instance_node")) parms = { "filename": hou.text.expandString( - "$HIP/pyblish/{}.abc".format(subset_name)), + "$HIP/pyblish/{}.abc".format(product_name)), "use_sop_path": False, } diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py index f60f5bc42f..6d992f136a 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py @@ -9,7 +9,7 @@ class CreateArnoldAss(plugin.HoudiniCreator): identifier = "io.openpype.creators.houdini.ass" label = "Arnold ASS" - family = "ass" + product_type = "ass" icon = "magic" # Default extension: `.ass` or `.ass.gz` @@ -17,7 +17,7 @@ class CreateArnoldAss(plugin.HoudiniCreator): # will override it by the value in the project settings ext = ".ass" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): import hou instance_data.pop("active", None) @@ -27,7 +27,7 @@ class CreateArnoldAss(plugin.HoudiniCreator): creator_attributes["farm"] = pre_create_data["farm"] instance = super(CreateArnoldAss, self).create( - subset_name, + product_name, instance_data, pre_create_data) # type: plugin.CreatedInstance @@ -41,7 +41,7 @@ class CreateArnoldAss(plugin.HoudiniCreator): filepath = "{}{}".format( hou.text.expandString("$HIP/pyblish/"), - "{}.$F4{}".format(subset_name, self.ext) + "{}.$F4{}".format(product_name, self.ext) ) parms = { # Render frame range @@ -54,7 +54,7 @@ class CreateArnoldAss(plugin.HoudiniCreator): instance_node.setParms(parms) # Lock any parameters in this list - to_lock = ["ar_ass_export_enable", "family", "id"] + to_lock = ["ar_ass_export_enable", "productType", "id"] self.lock_parameters(instance_node, to_lock) def get_instance_attr_defs(self): diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py index 590a92f56f..b7c5910a4f 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py @@ -7,7 +7,7 @@ class CreateArnoldRop(plugin.HoudiniCreator): identifier = "io.openpype.creators.houdini.arnold_rop" label = "Arnold ROP" - family = "arnold_rop" + product_type = "arnold_rop" icon = "magic" # Default extension @@ -16,7 +16,7 @@ class CreateArnoldRop(plugin.HoudiniCreator): # Default to split export and render jobs export_job = True - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): import hou # Remove the active, we are checking the bypass flag of the nodes @@ -29,7 +29,7 @@ class CreateArnoldRop(plugin.HoudiniCreator): instance_data["farm"] = pre_create_data.get("farm") instance = super(CreateArnoldRop, self).create( - subset_name, + product_name, instance_data, pre_create_data) # type: plugin.CreatedInstance @@ -37,9 +37,9 @@ class CreateArnoldRop(plugin.HoudiniCreator): ext = pre_create_data.get("image_format") - filepath = "{renders_dir}{subset_name}/{subset_name}.$F4.{ext}".format( + filepath = "{renders_dir}{product_name}/{product_name}.$F4.{ext}".format( renders_dir=hou.text.expandString("$HIP/pyblish/renders/"), - subset_name=subset_name, + 
product_name=product_name, ext=ext, ) parms = { @@ -53,9 +53,9 @@ class CreateArnoldRop(plugin.HoudiniCreator): if pre_create_data.get("export_job"): ass_filepath = \ - "{export_dir}{subset_name}/{subset_name}.$F4.ass".format( + "{export_dir}{product_name}/{product_name}.$F4.ass".format( export_dir=hou.text.expandString("$HIP/pyblish/ass/"), - subset_name=subset_name, + product_name=product_name, ) parms["ar_ass_export_enable"] = 1 parms["ar_ass_file"] = ass_filepath @@ -63,7 +63,7 @@ class CreateArnoldRop(plugin.HoudiniCreator): instance_node.setParms(parms) # Lock any parameters in this list - to_lock = ["family", "id"] + to_lock = ["productType", "id"] self.lock_parameters(instance_node, to_lock) def get_pre_create_attr_defs(self): diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py b/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py index 135c889b3e..92c89c71cb 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py @@ -10,10 +10,10 @@ class CreateBGEO(plugin.HoudiniCreator): """BGEO pointcache creator.""" identifier = "io.openpype.creators.houdini.bgeo" label = "PointCache (Bgeo)" - family = "pointcache" + product_type = "pointcache" icon = "gears" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): instance_data.pop("active", None) @@ -23,7 +23,7 @@ class CreateBGEO(plugin.HoudiniCreator): creator_attributes["farm"] = pre_create_data["farm"] instance = super(CreateBGEO, self).create( - subset_name, + product_name, instance_data, pre_create_data) # type: CreatedInstance @@ -32,7 +32,7 @@ class CreateBGEO(plugin.HoudiniCreator): file_path = "{}{}".format( hou.text.expandString("$HIP/pyblish/"), "{}.$F4.{}".format( - subset_name, + product_name, pre_create_data.get("bgeo_type") or "bgeo.sc") ) parms = { diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_composite.py b/client/ayon_core/hosts/houdini/plugins/create/create_composite.py index b87e1fd5b1..a1104e5093 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_composite.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_composite.py @@ -11,26 +11,26 @@ class CreateCompositeSequence(plugin.HoudiniCreator): identifier = "io.openpype.creators.houdini.imagesequence" label = "Composite (Image Sequence)" - family = "imagesequence" + product_type = "imagesequence" icon = "gears" ext = ".exr" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): import hou # noqa instance_data.pop("active", None) instance_data.update({"node_type": "comp"}) instance = super(CreateCompositeSequence, self).create( - subset_name, + product_name, instance_data, pre_create_data) # type: CreatedInstance instance_node = hou.node(instance.get("instance_node")) filepath = "{}{}".format( hou.text.expandString("$HIP/pyblish/"), - "{}.$F4{}".format(subset_name, self.ext) + "{}.$F4{}".format(product_name, self.ext) ) parms = { "trange": 1, diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_hda.py b/client/ayon_core/hosts/houdini/plugins/create/create_hda.py index faddc11b0c..994977de7d 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_hda.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_hda.py @@ -13,14 +13,14 @@ class CreateHDA(plugin.HoudiniCreator): identifier = "io.openpype.creators.houdini.hda" label = "Houdini 
Digital Asset (Hda)" - family = "hda" + product_type = "hda" icon = "gears" maintain_selection = False - def _check_existing(self, asset_name, subset_name): + def _check_existing(self, asset_name, product_name): # type: (str) -> bool - """Check if existing subset name versions already exists.""" - # Get all subsets of the current asset + """Check if existing product name versions already exists.""" + # Get all products of the current folder project_name = self.project_name asset_doc = get_asset_by_name( project_name, asset_name, fields=["_id"] @@ -28,11 +28,11 @@ class CreateHDA(plugin.HoudiniCreator): subset_docs = get_subsets( project_name, asset_ids=[asset_doc["_id"]], fields=["name"] ) - existing_subset_names_low = { + existing_product_names_low = { subset_doc["name"].lower() for subset_doc in subset_docs } - return subset_name.lower() in existing_subset_names_low + return product_name.lower() in existing_product_names_low def create_instance_node( self, asset_name, node_name, parent, node_type="geometry" @@ -64,7 +64,7 @@ class CreateHDA(plugin.HoudiniCreator): hda_node.layoutChildren() elif self._check_existing(asset_name, node_name): raise plugin.OpenPypeCreatorError( - ("subset {} is already published with different HDA" + ("product {} is already published with different HDA" "definition.").format(node_name)) else: hda_node = to_hda @@ -73,11 +73,11 @@ class CreateHDA(plugin.HoudiniCreator): self.customize_node_look(hda_node) return hda_node - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): instance_data.pop("active", None) instance = super(CreateHDA, self).create( - subset_name, + product_name, instance_data, pre_create_data) # type: plugin.CreatedInstance diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py index 5211044fea..9eb9d80cd3 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py @@ -9,10 +9,10 @@ class CreateKarmaROP(plugin.HoudiniCreator): """Karma ROP""" identifier = "io.openpype.creators.houdini.karma_rop" label = "Karma ROP" - family = "karma_rop" + product_type = "karma_rop" icon = "magic" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): import hou # noqa instance_data.pop("active", None) @@ -23,7 +23,7 @@ class CreateKarmaROP(plugin.HoudiniCreator): instance_data["farm"] = pre_create_data.get("farm") instance = super(CreateKarmaROP, self).create( - subset_name, + product_name, instance_data, pre_create_data) # type: CreatedInstance @@ -31,19 +31,19 @@ class CreateKarmaROP(plugin.HoudiniCreator): ext = pre_create_data.get("image_format") - filepath = "{renders_dir}{subset_name}/{subset_name}.$F4.{ext}".format( + filepath = "{renders_dir}{product_name}/{product_name}.$F4.{ext}".format( renders_dir=hou.text.expandString("$HIP/pyblish/renders/"), - subset_name=subset_name, + product_name=product_name, ext=ext, ) - checkpoint = "{cp_dir}{subset_name}.$F4.checkpoint".format( + checkpoint = "{cp_dir}{product_name}.$F4.checkpoint".format( cp_dir=hou.text.expandString("$HIP/pyblish/"), - subset_name=subset_name + product_name=product_name ) - usd_directory = "{usd_dir}{subset_name}_$RENDERID".format( + usd_directory = "{usd_dir}{product_name}_$RENDERID".format( 
usd_dir=hou.text.expandString("$HIP/pyblish/renders/usd_renders/"), # noqa - subset_name=subset_name + product_name=product_name ) parms = { @@ -84,7 +84,7 @@ class CreateKarmaROP(plugin.HoudiniCreator): instance_node.setParms(parms) # Lock some Avalon attributes - to_lock = ["family", "id"] + to_lock = ["productType", "id"] self.lock_parameters(instance_node, to_lock) def get_pre_create_attr_defs(self): diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py index 7f1da13d2e..bb10f3893c 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py @@ -9,10 +9,10 @@ class CreateMantraIFD(plugin.HoudiniCreator): """Mantra .ifd Archive""" identifier = "io.openpype.creators.houdini.mantraifd" label = "Mantra IFD" - family = "mantraifd" + product_type = "mantraifd" icon = "gears" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): import hou instance_data.pop("active", None) instance_data.update({"node_type": "ifd"}) @@ -20,7 +20,7 @@ class CreateMantraIFD(plugin.HoudiniCreator): "creator_attributes", dict()) creator_attributes["farm"] = pre_create_data["farm"] instance = super(CreateMantraIFD, self).create( - subset_name, + product_name, instance_data, pre_create_data) # type: CreatedInstance @@ -28,7 +28,7 @@ class CreateMantraIFD(plugin.HoudiniCreator): filepath = "{}{}".format( hou.text.expandString("$HIP/pyblish/"), - "{}.$F4.ifd".format(subset_name)) + "{}.$F4.ifd".format(product_name)) parms = { # Render frame range "trange": 1, @@ -40,7 +40,7 @@ class CreateMantraIFD(plugin.HoudiniCreator): instance_node.setParms(parms) # Lock any parameters in this list - to_lock = ["soho_outputmode", "family", "id"] + to_lock = ["soho_outputmode", "productType", "id"] self.lock_parameters(instance_node, to_lock) def get_instance_attr_defs(self): diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py index 02252f35d1..f15f49f463 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py @@ -9,13 +9,13 @@ class CreateMantraROP(plugin.HoudiniCreator): """Mantra ROP""" identifier = "io.openpype.creators.houdini.mantra_rop" label = "Mantra ROP" - family = "mantra_rop" + product_type = "mantra_rop" icon = "magic" # Default to split export and render jobs export_job = True - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): import hou # noqa instance_data.pop("active", None) @@ -26,7 +26,7 @@ class CreateMantraROP(plugin.HoudiniCreator): instance_data["farm"] = pre_create_data.get("farm") instance = super(CreateMantraROP, self).create( - subset_name, + product_name, instance_data, pre_create_data) # type: CreatedInstance @@ -34,9 +34,9 @@ class CreateMantraROP(plugin.HoudiniCreator): ext = pre_create_data.get("image_format") - filepath = "{renders_dir}{subset_name}/{subset_name}.$F4.{ext}".format( + filepath = "{renders_dir}{product_name}/{product_name}.$F4.{ext}".format( renders_dir=hou.text.expandString("$HIP/pyblish/renders/"), - subset_name=subset_name, + product_name=product_name, ext=ext, ) @@ -49,9 +49,9 @@ class CreateMantraROP(plugin.HoudiniCreator): if 
pre_create_data.get("export_job"): ifd_filepath = \ - "{export_dir}{subset_name}/{subset_name}.$F4.ifd".format( + "{export_dir}{product_name}/{product_name}.$F4.ifd".format( export_dir=hou.text.expandString("$HIP/pyblish/ifd/"), - subset_name=subset_name, + product_name=product_name, ) parms["soho_outputmode"] = 1 parms["soho_diskfile"] = ifd_filepath @@ -75,7 +75,7 @@ class CreateMantraROP(plugin.HoudiniCreator): instance_node.setParms(parms) # Lock some Avalon attributes - to_lock = ["family", "id"] + to_lock = ["productType", "id"] self.lock_parameters(instance_node, to_lock) def get_pre_create_attr_defs(self): diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_pointcache.py b/client/ayon_core/hosts/houdini/plugins/create/create_pointcache.py index 07dcc17f25..9e0a335c3a 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_pointcache.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_pointcache.py @@ -11,10 +11,10 @@ class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" identifier = "io.openpype.creators.houdini.pointcache" label = "PointCache (Abc)" - family = "pointcache" + product_type = "pointcache" icon = "gears" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): instance_data.pop("active", None) instance_data.update({"node_type": "alembic"}) creator_attributes = instance_data.setdefault( @@ -22,7 +22,7 @@ class CreatePointCache(plugin.HoudiniCreator): creator_attributes["farm"] = pre_create_data["farm"] instance = super(CreatePointCache, self).create( - subset_name, + product_name, instance_data, pre_create_data) @@ -35,7 +35,7 @@ class CreatePointCache(plugin.HoudiniCreator): "format": 2, "facesets": 0, "filename": hou.text.expandString( - "$HIP/pyblish/{}.abc".format(subset_name)) + "$HIP/pyblish/{}.abc".format(product_name)) } if self.selected_nodes: diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_redshift_proxy.py b/client/ayon_core/hosts/houdini/plugins/create/create_redshift_proxy.py index fa42411a1c..6a9321b95a 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_redshift_proxy.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_redshift_proxy.py @@ -9,10 +9,10 @@ class CreateRedshiftProxy(plugin.HoudiniCreator): """Redshift Proxy""" identifier = "io.openpype.creators.houdini.redshiftproxy" label = "Redshift Proxy" - family = "redshiftproxy" + product_type = "redshiftproxy" icon = "magic" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): # Remove the active, we are checking the bypass flag of the nodes instance_data.pop("active", None) @@ -30,14 +30,14 @@ class CreateRedshiftProxy(plugin.HoudiniCreator): creator_attributes["farm"] = pre_create_data["farm"] instance = super(CreateRedshiftProxy, self).create( - subset_name, + product_name, instance_data, pre_create_data) instance_node = hou.node(instance.get("instance_node")) parms = { - "RS_archive_file": '$HIP/pyblish/{}.$F4.rs'.format(subset_name), + "RS_archive_file": '$HIP/pyblish/{}.$F4.rs'.format(product_name), } if self.selected_nodes: @@ -46,7 +46,7 @@ class CreateRedshiftProxy(plugin.HoudiniCreator): instance_node.setParms(parms) # Lock some Avalon attributes - to_lock = ["family", "id", "prim_to_detail_pattern"] + to_lock = ["productType", "id", "prim_to_detail_pattern"] self.lock_parameters(instance_node, to_lock) def get_network_categories(self): diff 
--git a/client/ayon_core/hosts/houdini/plugins/create/create_redshift_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_redshift_rop.py index 8e88c690b9..3d6d657cf0 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_redshift_rop.py @@ -11,14 +11,14 @@ class CreateRedshiftROP(plugin.HoudiniCreator): identifier = "io.openpype.creators.houdini.redshift_rop" label = "Redshift ROP" - family = "redshift_rop" + product_type = "redshift_rop" icon = "magic" ext = "exr" # Default to split export and render jobs split_render = True - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): instance_data.pop("active", None) instance_data.update({"node_type": "Redshift_ROP"}) @@ -28,7 +28,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator): instance_data["farm"] = pre_create_data.get("farm") instance = super(CreateRedshiftROP, self).create( - subset_name, + product_name, instance_data, pre_create_data) @@ -56,9 +56,9 @@ class CreateRedshiftROP(plugin.HoudiniCreator): ipr_rop.parm("linked_rop").set(instance_node.path()) ext = pre_create_data.get("image_format") - filepath = "{renders_dir}{subset_name}/{subset_name}.{fmt}".format( + filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format( renders_dir=hou.text.expandString("$HIP/pyblish/renders/"), - subset_name=subset_name, + product_name=product_name, fmt="${aov}.$F4.{ext}".format(aov="AOV", ext=ext) ) @@ -83,7 +83,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator): parms["RS_renderCamera"] = camera or "" export_dir = hou.text.expandString("$HIP/pyblish/rs/") - rs_filepath = f"{export_dir}{subset_name}/{subset_name}.$F4.rs" + rs_filepath = f"{export_dir}{product_name}/{product_name}.$F4.rs" parms["RS_archive_file"] = rs_filepath if pre_create_data.get("split_render", self.split_render): @@ -92,7 +92,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator): instance_node.setParms(parms) # Lock some Avalon attributes - to_lock = ["family", "id"] + to_lock = ["productType", "id"] self.lock_parameters(instance_node, to_lock) def remove_instances(self, instances): diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_review.py b/client/ayon_core/hosts/houdini/plugins/create/create_review.py index c512a61105..18f7ce498d 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_review.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_review.py @@ -12,10 +12,10 @@ class CreateReview(plugin.HoudiniCreator): identifier = "io.openpype.creators.houdini.review" label = "Review" - family = "review" + product_type = "review" icon = "video-camera" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): instance_data.pop("active", None) instance_data.update({"node_type": "opengl"}) @@ -23,7 +23,7 @@ class CreateReview(plugin.HoudiniCreator): instance_data["keepImages"] = pre_create_data.get("keepImages") instance = super(CreateReview, self).create( - subset_name, + product_name, instance_data, pre_create_data) @@ -31,9 +31,10 @@ class CreateReview(plugin.HoudiniCreator): frame_range = hou.playbar.frameRange() - filepath = "{root}/{subset}/{subset}.$F4.{ext}".format( + filepath = "{root}/{product_name}/{product_name}.$F4.{ext}".format( root=hou.text.expandString("$HIP/pyblish"), - subset="`chs(\"subset\")`", # keep dynamic link to subset + # keep dynamic link to product name + 
product_name="`chs(\"AYON_productName\")`", ext=pre_create_data.get("image_format") or "png" ) @@ -89,7 +90,7 @@ class CreateReview(plugin.HoudiniCreator): if os.getenv("OCIO"): self.set_colorcorrect_to_default_view_space(instance_node) - to_lock = ["id", "family"] + to_lock = ["id", "productType"] self.lock_parameters(instance_node, to_lock) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_staticmesh.py b/client/ayon_core/hosts/houdini/plugins/create/create_staticmesh.py index 319be3568d..bc8a2507cd 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_staticmesh.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_staticmesh.py @@ -11,17 +11,17 @@ class CreateStaticMesh(plugin.HoudiniCreator): identifier = "io.openpype.creators.houdini.staticmesh.fbx" label = "Static Mesh (FBX)" - family = "staticMesh" + product_type = "staticMesh" icon = "fa5s.cubes" default_variants = ["Main"] - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): instance_data.update({"node_type": "filmboxfbx"}) instance = super(CreateStaticMesh, self).create( - subset_name, + product_name, instance_data, pre_create_data) @@ -30,7 +30,7 @@ class CreateStaticMesh(plugin.HoudiniCreator): # prepare parms output_path = hou.text.expandString( - "$HIP/pyblish/{}.fbx".format(subset_name) + "$HIP/pyblish/{}.fbx".format(product_name) ) parms = { @@ -48,7 +48,7 @@ class CreateStaticMesh(plugin.HoudiniCreator): instance_node.setParms(parms) # Lock any parameters in this list - to_lock = ["family", "id"] + to_lock = ["productType", "id"] self.lock_parameters(instance_node, to_lock) def get_network_categories(self): @@ -88,14 +88,14 @@ class CreateStaticMesh(plugin.HoudiniCreator): return attrs + [createsubnetroot, vcformat, convert_units] def get_dynamic_data( - self, variant, task_name, asset_doc, project_name, host_name, instance + self, project_name, asset_doc, task_name, variant, host_name, instance ): """ - The default subset name templates for Unreal include {asset} and thus + The default prodcut name templates for Unreal include {asset} and thus we should pass that along as dynamic data. 
""" dynamic_data = super(CreateStaticMesh, self).get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name, instance + project_name, asset_doc, task_name, variant, host_name, instance ) dynamic_data["asset"] = asset_doc["name"] return dynamic_data diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_usd.py b/client/ayon_core/hosts/houdini/plugins/create/create_usd.py index db9c77fffe..ee05639368 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_usd.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_usd.py @@ -10,24 +10,24 @@ class CreateUSD(plugin.HoudiniCreator): """Universal Scene Description""" identifier = "io.openpype.creators.houdini.usd" label = "USD (experimental)" - family = "usd" + product_type = "usd" icon = "gears" enabled = False - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): instance_data.pop("active", None) instance_data.update({"node_type": "usd"}) instance = super(CreateUSD, self).create( - subset_name, + product_name, instance_data, pre_create_data) # type: CreatedInstance instance_node = hou.node(instance.get("instance_node")) parms = { - "lopoutput": "$HIP/pyblish/{}.usd".format(subset_name), + "lopoutput": "$HIP/pyblish/{}.usd".format(product_name), "enableoutputprocessor_simplerelativepaths": False, } @@ -40,7 +40,7 @@ class CreateUSD(plugin.HoudiniCreator): to_lock = [ "fileperframe", # Lock some Avalon attributes - "family", + "productType", "id", ] self.lock_parameters(instance_node, to_lock) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py b/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py index 72a2d2fc7f..0a5c8896a8 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py @@ -8,10 +8,10 @@ class CreateUSDRender(plugin.HoudiniCreator): """USD Render ROP in /stage""" identifier = "io.openpype.creators.houdini.usdrender" label = "USD Render (experimental)" - family = "usdrender" + product_type = "usdrender" icon = "magic" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): import hou # noqa instance_data["parent"] = hou.node("/stage") @@ -21,7 +21,7 @@ class CreateUSDRender(plugin.HoudiniCreator): instance_data.update({"node_type": "usdrender"}) instance = super(CreateUSDRender, self).create( - subset_name, + product_name, instance_data, pre_create_data) # type: CreatedInstance @@ -37,5 +37,5 @@ class CreateUSDRender(plugin.HoudiniCreator): instance_node.setParms(parms) # Lock some Avalon attributes - to_lock = ["family", "id"] + to_lock = ["productType", "id"] self.lock_parameters(instance_node, to_lock) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py b/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py index 507917b7a5..9ac7ebdff7 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py @@ -12,10 +12,10 @@ class CreateVDBCache(plugin.HoudiniCreator): identifier = "io.openpype.creators.houdini.vdbcache" name = "vbdcache" label = "VDB Cache" - family = "vdbcache" + product_type = "vdbcache" icon = "cloud" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): import hou instance_data.pop("active", 
None) @@ -24,14 +24,14 @@ class CreateVDBCache(plugin.HoudiniCreator): "creator_attributes", dict()) creator_attributes["farm"] = pre_create_data["farm"] instance = super(CreateVDBCache, self).create( - subset_name, + product_name, instance_data, pre_create_data) # type: CreatedInstance instance_node = hou.node(instance.get("instance_node")) file_path = "{}{}".format( hou.text.expandString("$HIP/pyblish/"), - "{}.$F4.vdb".format(subset_name)) + "{}.$F4.vdb".format(product_name)) parms = { "sopoutput": file_path, "initsim": True, diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py index 609828e201..739796dc7c 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py @@ -12,14 +12,14 @@ class CreateVrayROP(plugin.HoudiniCreator): identifier = "io.openpype.creators.houdini.vray_rop" label = "VRay ROP" - family = "vray_rop" + product_type = "vray_rop" icon = "magic" ext = "exr" # Default to split export and render jobs export_job = True - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): instance_data.pop("active", None) instance_data.update({"node_type": "vray_renderer"}) @@ -29,7 +29,7 @@ class CreateVrayROP(plugin.HoudiniCreator): instance_data["farm"] = pre_create_data.get("farm") instance = super(CreateVrayROP, self).create( - subset_name, + product_name, instance_data, pre_create_data) # type: CreatedInstance @@ -57,9 +57,9 @@ class CreateVrayROP(plugin.HoudiniCreator): if pre_create_data.get("export_job"): scene_filepath = \ - "{export_dir}{subset_name}/{subset_name}.$F4.vrscene".format( + "{export_dir}{product_name}/{product_name}.$F4.vrscene".format( export_dir=hou.text.expandString("$HIP/pyblish/vrscene/"), - subset_name=subset_name, + product_name=product_name, ) # Setting render_export_mode to "2" because that's for # "Export only" ("1" is for "Export & Render") @@ -81,16 +81,16 @@ class CreateVrayROP(plugin.HoudiniCreator): instance_data["RenderElement"] = pre_create_data.get("render_element_enabled") # noqa if pre_create_data.get("render_element_enabled", True): # Vray has its own tag for AOV file output - filepath = "{renders_dir}{subset_name}/{subset_name}.{fmt}".format( + filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format( renders_dir=hou.text.expandString("$HIP/pyblish/renders/"), - subset_name=subset_name, + product_name=product_name, fmt="${aov}.$F4.{ext}".format(aov="AOV", ext=ext) ) filepath = "{}{}".format( hou.text.expandString("$HIP/pyblish/renders/"), - "{}/{}.${}.$F4.{}".format(subset_name, - subset_name, + "{}/{}.${}.$F4.{}".format(product_name, + product_name, "AOV", ext) ) @@ -108,9 +108,9 @@ class CreateVrayROP(plugin.HoudiniCreator): }) else: - filepath = "{renders_dir}{subset_name}/{subset_name}.{fmt}".format( + filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format( renders_dir=hou.text.expandString("$HIP/pyblish/renders/"), - subset_name=subset_name, + product_name=product_name, fmt="$F4.{ext}".format(ext=ext) ) parms.update({ @@ -125,7 +125,7 @@ class CreateVrayROP(plugin.HoudiniCreator): instance_node.setParms(parms) # lock parameters from AVALON - to_lock = ["family", "id"] + to_lock = ["productType", "id"] self.lock_parameters(instance_node, to_lock) def remove_instances(self, instances): diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_workfile.py 
b/client/ayon_core/hosts/houdini/plugins/create/create_workfile.py index 19631566df..631ef6ce77 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_workfile.py @@ -12,7 +12,7 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): """Workfile auto-creator.""" identifier = "io.openpype.creators.houdini.workfile" label = "Workfile" - family = "workfile" + product_type = "workfile" icon = "fa5.file" default_variant = "Main" @@ -37,8 +37,12 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): if current_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + variant, + host_name, ) data = { "folderPath": asset_name, @@ -48,12 +52,16 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): data.update( self.get_dynamic_data( - variant, task_name, asset_doc, - project_name, host_name, current_instance) + project_name, + asset_doc, + task_name, + variant, + host_name, + current_instance) ) self.log.info("Auto-creating workfile instance...") current_instance = CreatedInstance( - self.family, subset_name, data, self + self.product_type, product_name, data, self ) self._add_instance_to_context(current_instance) elif ( @@ -62,12 +70,16 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): ): # Update instance context if is not the same asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + variant, + host_name, ) current_instance["folderPath"] = asset_name current_instance["task"] = task_name - current_instance["subset"] = subset_name + current_instance["productName"] = product_name # write workfile information to context container. 
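The create_workfile hunk above is representative of the instance-data key migration applied throughout this patch: data moves from the OpenPype names to the AYON names while the creator logic itself stays the same. A minimal standalone sketch of that mapping follows; the helper and the sample values are illustrative only and not part of the codebase, and note that some collectors later in this patch intentionally keep the legacy "family" key alongside "productType" for backwards compatibility.

```python
# Illustrative helper only -- it mirrors the renames these hunks apply by
# hand: subset -> productName, family -> productType, asset -> folderPath.
OLD_TO_NEW_KEYS = {
    "subset": "productName",
    "family": "productType",
    "asset": "folderPath",
}


def migrate_instance_data(data):
    """Return a copy of instance data using the AYON key names."""
    migrated = dict(data)
    for old_key, new_key in OLD_TO_NEW_KEYS.items():
        if old_key in migrated and new_key not in migrated:
            migrated[new_key] = migrated.pop(old_key)
    return migrated


print(migrate_instance_data(
    {"subset": "workfileMain", "family": "workfile", "asset": "sh010"}
))
# {'productName': 'workfileMain', 'productType': 'workfile', 'folderPath': 'sh010'}
```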
op_ctx = hou.node(CONTEXT_CONTAINER) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py b/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py index 6996b0d117..5e138cde83 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py @@ -81,8 +81,8 @@ class AbcLoader(load.LoaderPlugin): suffix="", ) - def update(self, container, representation): - + def update(self, container, context): + repre_doc = context["representation"] node = container["node"] try: alembic_node = next( @@ -93,18 +93,18 @@ class AbcLoader(load.LoaderPlugin): return # Update the file path - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_doc) file_path = file_path.replace("\\", "/") alembic_node.setParms({"fileName": file_path}) # Update attribute - node.setParms({"representation": str(representation["_id"])}) + node.setParms({"representation": str(repre_doc["_id"])}) def remove(self, container): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_alembic_archive.py b/client/ayon_core/hosts/houdini/plugins/load/load_alembic_archive.py index cfe3b16ebb..0d505806ff 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_alembic_archive.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_alembic_archive.py @@ -55,17 +55,17 @@ class AbcArchiveLoader(load.LoaderPlugin): self.__class__.__name__, suffix="") - def update(self, container, representation): - + def update(self, container, context): + repre_doc = context["representation"] node = container["node"] # Update the file path - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_doc) file_path = file_path.replace("\\", "/") # Update attributes node.setParms({"fileName": file_path, - "representation": str(representation["_id"])}) + "representation": str(repre_doc["_id"])}) # Rebuild node.parm("buildHierarchy").pressButton() @@ -75,5 +75,5 @@ class AbcArchiveLoader(load.LoaderPlugin): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_ass.py b/client/ayon_core/hosts/houdini/plugins/load/load_ass.py index 6fbe315adb..396eb3a9f7 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_ass.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_ass.py @@ -48,13 +48,14 @@ class AssLoader(load.LoaderPlugin): suffix="", ) - def update(self, container, representation): + def update(self, container, context): # Update the file path + repre_doc = context["representation"] procedural = container["node"] - procedural.setParms({"ar_filename": self.format_path(representation)}) + procedural.setParms({"ar_filename": self.format_path(repre_doc)}) # Update attribute - procedural.setParms({"representation": str(representation["_id"])}) + procedural.setParms({"representation": str(repre_doc["_id"])}) def remove(self, container): node = container["node"] @@ -86,5 +87,5 @@ class AssLoader(load.LoaderPlugin): return os.path.normpath(path).replace("\\", "/") - def switch(self, container, representation): - self.update(container, representation) + def switch(self, 
container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_bgeo.py b/client/ayon_core/hosts/houdini/plugins/load/load_bgeo.py index afcf82562c..4817e40961 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_bgeo.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_bgeo.py @@ -82,8 +82,8 @@ class BgeoLoader(load.LoaderPlugin): return filename - def update(self, container, representation): - + def update(self, container, context): + repre_doc = context["representation"] node = container["node"] try: file_node = next( @@ -94,18 +94,18 @@ class BgeoLoader(load.LoaderPlugin): return # Update the file path - file_path = get_representation_path(representation) - file_path = self.format_path(file_path, representation) + file_path = get_representation_path(repre_doc) + file_path = self.format_path(file_path, repre_doc) file_node.setParms({"file": file_path}) # Update attribute - node.setParms({"representation": str(representation["_id"])}) + node.setParms({"representation": str(repre_doc["_id"])}) def remove(self, container): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_camera.py b/client/ayon_core/hosts/houdini/plugins/load/load_camera.py index 11826fb30d..6f6560facc 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_camera.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_camera.py @@ -132,17 +132,17 @@ class CameraLoader(load.LoaderPlugin): self.__class__.__name__, suffix="") - def update(self, container, representation): - + def update(self, container, context): + repre_doc = context["representation"] node = container["node"] # Update the file path - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_doc) file_path = file_path.replace("\\", "/") # Update attributes node.setParms({"fileName": file_path, - "representation": str(representation["_id"])}) + "representation": str(repre_doc["_id"])}) # Store the cam temporarily next to the Alembic Archive # so that we can preserve parm values the user set on it diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_fbx.py b/client/ayon_core/hosts/houdini/plugins/load/load_fbx.py index cc1a746d93..4857dbb900 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_fbx.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_fbx.py @@ -47,8 +47,8 @@ class FbxLoader(load.LoaderPlugin): return containerised_nodes - def update(self, container, representation): - + def update(self, container, context): + repre_doc = context["representation"] node = container["node"] try: file_node = next( @@ -59,21 +59,21 @@ class FbxLoader(load.LoaderPlugin): return # Update the file path from representation - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_doc) file_path = file_path.replace("\\", "/") file_node.setParms({"file": file_path}) # Update attribute - node.setParms({"representation": str(representation["_id"])}) + node.setParms({"representation": str(repre_doc["_id"])}) def remove(self, container): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def get_node_name(self, context, name=None, 
namespace=None): """Define node name.""" @@ -88,7 +88,7 @@ class FbxLoader(load.LoaderPlugin): return namespace, node_name - def create_load_node_tree(self, file_path, node_name, subset_name): + def create_load_node_tree(self, file_path, node_name, product_name): """Create Load network. you can start building your tree at any obj level. @@ -118,14 +118,14 @@ class FbxLoader(load.LoaderPlugin): file_node.setParms({"file": file_path}) # Create attribute delete - attribdelete_name = "attribdelete_{}".format(subset_name) + attribdelete_name = "attribdelete_{}".format(product_name) attribdelete = parent_node.createNode("attribdelete", node_name=attribdelete_name) attribdelete.setParms({"ptdel": "fbx_*"}) attribdelete.setInput(0, file_node) # Create a Null node - null_name = "OUT_{}".format(subset_name) + null_name = "OUT_{}".format(product_name) null = parent_node.createNode("null", node_name=null_name) null.setInput(0, attribdelete) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_hda.py b/client/ayon_core/hosts/houdini/plugins/load/load_hda.py index 288152f2bd..ffe9e55036 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_hda.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_hda.py @@ -48,11 +48,12 @@ class HdaLoader(load.LoaderPlugin): suffix="", ) - def update(self, container, representation): + def update(self, container, context): import hou + repre_doc = context["representation"] hda_node = container["node"] - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_doc) file_path = file_path.replace("\\", "/") hou.hda.installFile(file_path) defs = hda_node.type().allInstalledDefinitions() @@ -60,7 +61,7 @@ class HdaLoader(load.LoaderPlugin): new = def_paths.index(file_path) defs[new].setIsPreferred(True) hda_node.setParms({ - "representation": str(representation["_id"]) + "representation": str(repre_doc["_id"]) }) def remove(self, container): diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_image.py b/client/ayon_core/hosts/houdini/plugins/load/load_image.py index 20fe2f87ca..c89cc3b173 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_image.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_image.py @@ -87,12 +87,12 @@ class ImageLoader(load.LoaderPlugin): return node - def update(self, container, representation): - + def update(self, container, context): + repre_doc = context["representation"] node = container["node"] # Update the file path - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_doc) file_path = file_path.replace("\\", "/") file_path = self._get_file_sequence(file_path) @@ -100,7 +100,7 @@ class ImageLoader(load.LoaderPlugin): node.setParms( { "filename1": file_path, - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), } ) @@ -128,5 +128,5 @@ class ImageLoader(load.LoaderPlugin): fname = ".".join([prefix, "$F{}".format(len(padding)), suffix]) return os.path.join(root, fname).replace("\\", "/") - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_redshift_proxy.py b/client/ayon_core/hosts/houdini/plugins/load/load_redshift_proxy.py index dd6e78b3bc..3e9ce1ff2e 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_redshift_proxy.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_redshift_proxy.py @@ -72,19 
+72,19 @@ class RedshiftProxyLoader(load.LoaderPlugin): suffix="", ) - def update(self, container, representation): - + def update(self, container, context): + repre_doc = context["representation"] # Update the file path - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_doc) node = container["node"] node.setParms({ "RS_objprop_proxy_file": self.format_path( - file_path, representation) + file_path, repre_doc) }) # Update attribute - node.setParms({"representation": str(representation["_id"])}) + node.setParms({"representation": str(repre_doc["_id"])}) def remove(self, container): diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_usd_layer.py b/client/ayon_core/hosts/houdini/plugins/load/load_usd_layer.py index 2c37c24884..f4f8a718ad 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_usd_layer.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_usd_layer.py @@ -57,19 +57,19 @@ class USDSublayerLoader(load.LoaderPlugin): return container - def update(self, container, representation): - + def update(self, container, context): + repre_doc = context["representation"] node = container["node"] # Update the file path - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_doc) file_path = file_path.replace("\\", "/") # Update attributes node.setParms( { "filepath1": file_path, - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), } ) @@ -81,5 +81,5 @@ class USDSublayerLoader(load.LoaderPlugin): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_usd_reference.py b/client/ayon_core/hosts/houdini/plugins/load/load_usd_reference.py index 9396f00cce..cb83a9a22e 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_usd_reference.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_usd_reference.py @@ -57,19 +57,19 @@ class USDReferenceLoader(load.LoaderPlugin): return container - def update(self, container, representation): - + def update(self, container, context): + repre_doc = context["representation"] node = container["node"] # Update the file path - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_doc) file_path = file_path.replace("\\", "/") # Update attributes node.setParms( { "filepath1": file_path, - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), } ) @@ -81,5 +81,5 @@ class USDReferenceLoader(load.LoaderPlugin): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_vdb.py b/client/ayon_core/hosts/houdini/plugins/load/load_vdb.py index c3e374ee8d..ed38e5a5d9 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_vdb.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_vdb.py @@ -79,8 +79,8 @@ class VdbLoader(load.LoaderPlugin): return filename - def update(self, container, representation): - + def update(self, container, context): + repre_doc = context["representation"] node = container["node"] try: file_node = next( @@ -91,18 +91,18 @@ class VdbLoader(load.LoaderPlugin): return # Update the file path - file_path = 
get_representation_path(representation) - file_path = self.format_path(file_path, representation) + file_path = get_representation_path(repre_doc) + file_path = self.format_path(file_path, repre_doc) file_node.setParms({"file": file_path}) # Update attribute - node.setParms({"representation": str(representation["_id"])}) + node.setParms({"representation": str(repre_doc["_id"])}) def remove(self, container): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_cache_farm.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_cache_farm.py index f91c253c25..040ad68a1a 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_cache_farm.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_cache_farm.py @@ -44,7 +44,7 @@ class CollectDataforCache(pyblish.api.InstancePlugin): # because ??? for family in instance.data["families"]: if family == "bgeo" or "abc": - instance.data["family"] = "pointcache" + instance.data["productType"] = "pointcache" break instance.data.update({ "plugin": "Houdini", diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_instances.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_instances.py index 2780da95d9..edfa78e4d9 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_instances.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_instances.py @@ -2,6 +2,7 @@ import hou import pyblish.api +from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID from ayon_core.hosts.houdini.api import lib @@ -12,11 +13,11 @@ class CollectInstances(pyblish.api.ContextPlugin): an specific node and marked with a unique identifier; Identifier: - id (str): "pyblish.avalon.instance + id (str): "ayon.create.instance" Specific node: The specific node is important because it dictates in which way the - subset is being exported. + product is being exported. alembic: will export Alembic file which supports cascading attributes like 'cbId' and 'path' @@ -44,7 +45,9 @@ class CollectInstances(pyblish.api.ContextPlugin): if not node.parm("id"): continue - if node.evalParm("id") != "pyblish.avalon.instance": + if node.evalParm("id") not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: continue # instance was created by new creator code, skip it as @@ -72,12 +75,14 @@ class CollectInstances(pyblish.api.ContextPlugin): # Create nice name if the instance has a frame range. 
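The id check introduced above is the backwards-compatibility pattern used across the collectors in this patch: nodes imprinted by the legacy creator keep being collected while newly created nodes carry the AYON id. A standalone sketch, assuming the literal values quoted in this diff's docstrings; the real plug-ins import the constants from ayon_core.pipeline.

```python
# Values as quoted in this diff; the plug-ins import AYON_INSTANCE_ID and
# AVALON_INSTANCE_ID from ayon_core.pipeline instead of hardcoding them.
AYON_INSTANCE_ID = "ayon.create.instance"
AVALON_INSTANCE_ID = "pyblish.avalon.instance"
VALID_INSTANCE_IDS = {AYON_INSTANCE_ID, AVALON_INSTANCE_ID}


def is_publish_instance(node_id):
    """Accept nodes imprinted by either the AYON or the legacy creator."""
    return node_id in VALID_INSTANCE_IDS


assert is_publish_instance("ayon.create.instance")
assert is_publish_instance("pyblish.avalon.instance")
assert not is_publish_instance("pyblish.avalon.usdlayered")  # has its own collector
```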
label = data.get("name", node.name()) - label += " (%s)" % data["asset"] # include asset in name + label += " (%s)" % data["folderPath"] # include folder in name instance = context.create_instance(label) # Include `families` using `family` data - instance.data["families"] = [instance.data["family"]] + product_type = data["family"] + data["productType"] = product_type + instance.data["families"] = [product_type] instance[:] = [node] instance.data["instance_node"] = node.path() @@ -85,7 +90,9 @@ class CollectInstances(pyblish.api.ContextPlugin): def sort_by_family(instance): """Sort by family""" - return instance.data.get("families", instance.data.get("family")) + return instance.data.get( + "families", instance.data.get("productType") + ) # Sort/grouped by family (preserving local index) context[:] = sorted(context, key=sort_by_family) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_instances_usd_layered.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_instances_usd_layered.py index 800d6fb883..38d6ec733d 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_instances_usd_layered.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_instances_usd_layered.py @@ -2,7 +2,7 @@ import hou import pyblish.api from ayon_core.hosts.houdini.api import lib import ayon_core.hosts.houdini.api.usd as hou_usdlib -import ayon_core.lib.usdlib as usdlib +from ayon_core.pipeline import usdlib class CollectInstancesUsdLayered(pyblish.api.ContextPlugin): @@ -12,13 +12,13 @@ class CollectInstancesUsdLayered(pyblish.api.ContextPlugin): layers remain set to 'publish' by the user. This works differently from most of our Avalon instances in the pipeline. - As opposed to storing `pyblish.avalon.instance` as id on the node we store + As opposed to storing `ayon.create.instance` as id on the node we store `pyblish.avalon.usdlayered`. - Additionally this instance has no need for storing family, asset, subset + Additionally this instance has no need for storing family, asset, product or name on the nodes. Instead all information is retrieved solely from the output filepath, which is an Avalon URI: - avalon://{asset}/{subset}.{representation} + avalon://{asset}/{product}.{representation} Each final ROP node is considered a dependency for any of the Configured Save Path layers it sets along the way. As such, the instances shown in @@ -50,14 +50,19 @@ class CollectInstancesUsdLayered(pyblish.api.ContextPlugin): if node.evalParm("id") != "pyblish.avalon.usdlayered": continue - has_family = node.evalParm("family") - assert has_family, "'%s' is missing 'family'" % node.name() + has_product_type = node.evalParm("productType") + assert has_product_type, ( + "'%s' is missing 'productType'" % node.name() + ) self.process_node(node, context) def sort_by_family(instance): """Sort by family""" - return instance.data.get("families", instance.data.get("family")) + return instance.data.get( + "families", + instance.data.get("productType") + ) # Sort/grouped by family (preserving local index) context[:] = sorted(context, key=sort_by_family) @@ -82,8 +87,8 @@ class CollectInstancesUsdLayered(pyblish.api.ContextPlugin): # instead use the "colorbleed.usd" family to integrate. 
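The usd-layered collector's docstring above describes output paths as Avalon URIs of the form avalon://{asset}/{product}.{representation}. The actual resolver lives in the Houdini USD helpers and is not shown in this diff, so the parser below is only a toy with an assumed return shape, meant to make the convention concrete.

```python
from urllib.parse import urlparse


def parse_avalon_uri(uri):
    """Toy parser for 'avalon://{asset}/{product}.{representation}'."""
    parsed = urlparse(uri)
    if parsed.scheme != "avalon":
        return None
    # The product name may itself contain dots (shading variations), so the
    # representation is whatever follows the last dot.
    product, _, representation = parsed.path.lstrip("/").rpartition(".")
    return {"asset": parsed.netloc, "product": product, "ext": representation}


print(parse_avalon_uri("avalon://hero/usdShade.usd"))
# {'asset': 'hero', 'product': 'usdShade', 'ext': 'usd'}
```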
data["publishFamilies"] = ["colorbleed.usd"] - # For now group ALL of them into USD Layer subset group - # Allow this subset to be grouped into a USD Layer on creation + # For now group ALL of them into USD Layer product group + # Allow this product to be grouped into a USD Layer on creation data["subsetGroup"] = "USD Layer" instances = list() @@ -97,7 +102,7 @@ class CollectInstancesUsdLayered(pyblish.api.ContextPlugin): dependency.append(ropnode) dependency.data.update(data) dependency.data.update(dependency_save_data) - dependency.data["family"] = "colorbleed.usd.dependency" + dependency.data["productType"] = "colorbleed.usd.dependency" dependency.data["optional"] = False dependencies.append(dependency) @@ -137,9 +142,9 @@ class CollectInstancesUsdLayered(pyblish.api.ContextPlugin): self.log.warning("Non Avalon URI Layer Path: %s" % save_path) return {} - # Collect asset + subset from URI - name = "{subset} ({asset})".format(**uri_data) - fname = "{asset}_{subset}.{ext}".format(**uri_data) + # Collect asset + product from URI + name = "{product[name]} ({asset})".format(**uri_data) + fname = "{asset}_{product[name]}.{ext}".format(**uri_data) data = dict(uri_data) data["usdSavePath"] = save_path diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_redshift_rop.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_redshift_rop.py index 26dd942559..67cc080ead 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_redshift_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -68,6 +68,10 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): files_by_aov = { "_": self.generate_expected_files(instance, beauty_product)} + + aovs_rop = rop.parm("RS_aovGetFromNode").evalAsNode() + if aovs_rop: + rop = aovs_rop num_aovs = rop.evalParm("RS_aov") for index in range(num_aovs): diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_bootstrap.py index ed54ad8bc1..24ac9f22c3 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -5,13 +5,13 @@ from ayon_core.client import ( get_asset_by_name, get_asset_name_identifier, ) -import ayon_core.lib.usdlib as usdlib +from ayon_core.pipeline import usdlib class CollectUsdBootstrap(pyblish.api.InstancePlugin): """Collect special Asset/Shot bootstrap instances if those are needed. - Some specific subsets are intended to be part of the default structure + Some specific products are intended to be part of the default structure of an "Asset" or "Shot" in our USD pipeline. For example, for an Asset we layer a Model and Shade USD file over each other and expose that in a Asset USD file, ready to use. @@ -30,13 +30,12 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): def process(self, instance): - # Detect whether the current subset is a subset in a pipeline + # Detect whether the current product is a product in a pipeline def get_bootstrap(instance): - instance_subset = instance.data["subset"] + instance_product_name = instance.data["productName"] for name, layers in usdlib.PIPELINE.items(): - if instance_subset in set(layers): + if instance_product_name in set(layers): return name # e.g. 
"asset" - break else: return @@ -55,7 +54,7 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): self.log.debug("Add bootstrap for: %s" % bootstrap) project_name = instance.context.data["projectName"] - asset_name = instance.data["asset"] + asset_name = instance.data["folderPath"] asset_doc = get_asset_by_name(project_name, asset_name) assert asset_doc, "Asset must exist: %s" % asset_name @@ -72,22 +71,24 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): required += list(layers) self.log.debug("Checking required bootstrap: %s" % required) - for subset_name in required: - if self._subset_exists( - project_name, instance, subset_name, asset_doc + for product_name in required: + if self._product_exists( + project_name, instance, product_name, asset_doc ): continue self.log.debug( "Creating {0} USD bootstrap: {1} {2}".format( - bootstrap, asset_name, subset_name + bootstrap, asset_name, product_name ) ) - new = instance.context.create_instance(subset_name) - new.data["subset"] = subset_name - new.data["label"] = "{0} ({1})".format(subset_name, asset_name) - new.data["family"] = "usd.bootstrap" + product_type = "usd.bootstrap" + new = instance.context.create_instance(product_name) + new.data["productName"] = product_name + new.data["label"] = "{0} ({1})".format(product_name, asset_name) + new.data["productType"] = product_type + new.data["family"] = product_type new.data["comment"] = "Automated bootstrap USD file." new.data["publishFamilies"] = ["usd"] @@ -95,26 +96,28 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): new.data["optional"] = False # Copy some data from the instance for which we bootstrap - for key in ["asset"]: + for key in ["folderPath"]: new.data[key] = instance.data[key] - def _subset_exists(self, project_name, instance, subset_name, asset_doc): - """Return whether subset exists in current context or in database.""" + def _product_exists( + self, project_name, instance, product_name, asset_doc + ): + """Return whether product exists in current context or in database.""" # Allow it to be created during this publish session context = instance.context asset_doc_name = get_asset_name_identifier(asset_doc) for inst in context: if ( - inst.data["subset"] == subset_name - and inst.data["asset"] == asset_doc_name + inst.data["productName"] == product_name + and inst.data["folderPath"] == asset_doc_name ): return True # Or, if they already exist in the database we can # skip them too. if get_subset_by_name( - project_name, subset_name, asset_doc["_id"], fields=["_id"] + project_name, product_name, asset_doc["_id"], fields=["_id"] ): return True return False diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_layers.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_layers.py index e36cd875ba..f085b6ca41 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -45,22 +45,23 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): # Create configured layer instances so User can disable updating # specific configured layers for publishing. 
context = instance.context + product_type = "usdlayer" for layer, save_path in save_layers: name = os.path.basename(save_path) label = "{0} -> {1}".format(instance.data["name"], name) layer_inst = context.create_instance(name) - family = "usdlayer" - layer_inst.data["family"] = family - layer_inst.data["families"] = [family] - layer_inst.data["subset"] = "__stub__" + layer_inst.data["productType"] = product_type + layer_inst.data["family"] = product_type + layer_inst.data["families"] = [product_type] + layer_inst.data["productName"] = "__stub__" layer_inst.data["label"] = label - layer_inst.data["asset"] = instance.data["asset"] + layer_inst.data["folderPath"] = instance.data["folderPath"] layer_inst.data["instance_node"] = instance.data["instance_node"] # include same USD ROP layer_inst.append(rop_node) # include layer data layer_inst.append((layer, save_path)) - # Allow this subset to be grouped into a USD Layer on creation + # Allow this product to be grouped into a USD Layer on creation layer_inst.data["subsetGroup"] = "USD Layer" diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_vray_rop.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_vray_rop.py index f80ca39f1c..62b7dcdd5d 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_vray_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_vray_rop.py @@ -67,7 +67,7 @@ class CollectVrayROPRenderProducts(pyblish.api.InstancePlugin): beauty_product = self.get_render_product_name(default_prefix) render_products.append(beauty_product) files_by_aov = { - "RGB Color": self.generate_expected_files(instance, + "": self.generate_expected_files(instance, beauty_product)} if instance.data.get("RenderElement", True): @@ -75,7 +75,9 @@ class CollectVrayROPRenderProducts(pyblish.api.InstancePlugin): if render_element: for aov, renderpass in render_element.items(): render_products.append(renderpass) - files_by_aov[aov] = self.generate_expected_files(instance, renderpass) # noqa + files_by_aov[aov] = self.generate_expected_files( + instance, renderpass) + for product in render_products: self.log.debug("Found render product: %s" % product) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/extract_usd_layered.py b/client/ayon_core/hosts/houdini/plugins/publish/extract_usd_layered.py index 7160e3d282..99c61803e6 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -284,29 +284,29 @@ class ExtractUSDLayered(publish.Extractor): # Compare this dependency with the latest published version # to detect whether we should make this into a new publish # version. If not, skip it. - asset = get_asset_by_name( - project_name, dependency.data["asset"], fields=["_id"] + asset_doc = get_asset_by_name( + project_name, dependency.data["folderPath"], fields=["_id"] ) - subset = get_subset_by_name( + subset_doc = get_subset_by_name( project_name, - dependency.data["subset"], - asset["_id"], + dependency.data["productName"], + asset_doc["_id"], fields=["_id"] ) - if not subset: + if not subset_doc: # Subset doesn't exist yet. 
Definitely new file - self.log.debug("No existing subset..") + self.log.debug("No existing product..") return False - version = get_last_version_by_subset_id( - project_name, subset["_id"], fields=["_id"] + version_doc = get_last_version_by_subset_id( + project_name, subset_doc["_id"], fields=["_id"] ) - if not version: + if not version_doc: self.log.debug("No existing version..") return False representation = get_representation_by_name( - project_name, ext.lstrip("."), version["_id"] + project_name, ext.lstrip("."), version_doc["_id"] ) if not representation: self.log.debug("No existing representation..") diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_file_extension.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_file_extension.py index 1134f8853a..e9a0397a58 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_file_extension.py @@ -45,9 +45,9 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): # Create lookup for current family in instance families = [] - family = instance.data.get("family", None) - if family: - families.append(family) + product_type = instance.data.get("productType") + if product_type: + families.append(product_type) families = set(families) # Perform extension check diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_houdini_license_category.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_houdini_license_category.py index 0df858ca4b..9a68c34405 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_houdini_license_category.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_houdini_license_category.py @@ -28,7 +28,7 @@ class ValidateHoudiniNotApprenticeLicense(pyblish.api.InstancePlugin): if hou.isApprentice(): # Find which family was matched with the plug-in - families = {instance.data["family"]} + families = {instance.data["productType"]} families.update(instance.data.get("families", [])) disallowed_families = families.intersection(self.families) families = " ".join(sorted(disallowed_families)).title() diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_subset_name.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_subset_name.py index 67807b5366..e94f09568d 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_subset_name.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_subset_name.py @@ -10,7 +10,7 @@ from ayon_core.pipeline.publish import ( RepairAction, ) from ayon_core.hosts.houdini.api.action import SelectInvalidAction -from ayon_core.pipeline.create import get_subset_name +from ayon_core.pipeline.create import get_product_name import hou @@ -53,21 +53,23 @@ class ValidateSubsetName(pyblish.api.InstancePlugin, rop_node = hou.node(instance.data["instance_node"]) - # Check subset name + # Check product name asset_doc = instance.data["assetEntity"] - subset_name = get_subset_name( - family=instance.data["family"], + product_name = get_product_name( + instance.context.data["projectName"], + asset_doc, + instance.data["task"], + instance.context.data["hostName"], + instance.data["productType"], variant=instance.data["variant"], - task_name=instance.data["task"], - asset_doc=asset_doc, dynamic_data={"asset": asset_doc["name"]} ) - if instance.data.get("subset") != subset_name: + if instance.data.get("productName") != product_name: invalid.append(rop_node) cls.log.error( - "Invalid subset name on rop node '%s' should 
be '%s'.", - rop_node.path(), subset_name + "Invalid product name on rop node '%s' should be '%s'.", + rop_node.path(), product_name ) return invalid @@ -76,20 +78,22 @@ class ValidateSubsetName(pyblish.api.InstancePlugin, def repair(cls, instance): rop_node = hou.node(instance.data["instance_node"]) - # Check subset name + # Check product name asset_doc = instance.data["assetEntity"] - subset_name = get_subset_name( - family=instance.data["family"], + product_name = get_product_name( + instance.context.data["projectName"], + asset_doc, + instance.data["task"], + instance.context.data["hostName"], + instance.data["productType"], variant=instance.data["variant"], - task_name=instance.data["task"], - asset_doc=asset_doc, dynamic_data={"asset": asset_doc["name"]} ) - instance.data["subset"] = subset_name - rop_node.parm("subset").set(subset_name) + instance.data["productName"] = product_name + rop_node.parm("AYON_productName").set(product_name) cls.log.debug( - "Subset name on rop node '%s' has been set to '%s'.", - rop_node.path(), subset_name + "Product name on rop node '%s' has been set to '%s'.", + rop_node.path(), product_name ) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_unreal_staticmesh_naming.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_unreal_staticmesh_naming.py index dbee293074..33d0d42383 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_unreal_staticmesh_naming.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_unreal_staticmesh_naming.py @@ -23,7 +23,7 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin, - USP - UCX - This validator also checks if subset name is correct + This validator also checks if product name is correct - {static mesh prefix}_{Asset-Name}{Variant}. 
""" @@ -39,7 +39,7 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin, static_mesh_prefix = "" @classmethod - def apply_settings(cls, project_settings, system_settings): + def apply_settings(cls, project_settings): settings = ( project_settings["houdini"]["create"]["CreateStaticMesh"] diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py index 8fa20ace02..6d21b59a9c 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py @@ -18,23 +18,25 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): def process(self, instance): project_name = instance.context.data["projectName"] - asset_name = instance.data["asset"] - subset = instance.data["subset"] + asset_name = instance.data["folderPath"] + product_name = instance.data["productName"] # Assume shading variation starts after a dot separator - shade_subset = subset.split(".", 1)[0] - model_subset = re.sub("^usdShade", "usdModel", shade_subset) + shade_product_name = product_name.split(".", 1)[0] + model_product_name = re.sub( + "^usdShade", "usdModel", shade_product_name + ) asset_doc = instance.data.get("assetEntity") if not asset_doc: raise RuntimeError("Asset document is not filled on instance.") subset_doc = get_subset_by_name( - project_name, model_subset, asset_doc["_id"], fields=["_id"] + project_name, model_product_name, asset_doc["_id"], fields=["_id"] ) if not subset_doc: raise PublishValidationError( - ("USD Model subset not found: " - "{} ({})").format(model_subset, asset_name), + ("USD Model product not found: " + "{} ({})").format(model_product_name, asset_name), title=self.label ) diff --git a/client/ayon_core/hosts/max/addon.py b/client/ayon_core/hosts/max/addon.py index 416014025c..12f5f7eca0 100644 --- a/client/ayon_core/hosts/max/addon.py +++ b/client/ayon_core/hosts/max/addon.py @@ -1,17 +1,14 @@ # -*- coding: utf-8 -*- import os -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon MAX_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) -class MaxAddon(OpenPypeModule, IHostAddon): +class MaxAddon(AYONAddon, IHostAddon): name = "max" host_name = "max" - def initialize(self, module_settings): - self.enabled = True - def add_implementation_envs(self, env, _app): # Remove auto screen scale factor for Qt # - let 3dsmax decide it's value diff --git a/client/ayon_core/hosts/max/api/action.py b/client/ayon_core/hosts/max/api/action.py new file mode 100644 index 0000000000..bed72bc493 --- /dev/null +++ b/client/ayon_core/hosts/max/api/action.py @@ -0,0 +1,42 @@ +from pymxs import runtime as rt + +import pyblish.api + +from ayon_core.pipeline.publish import get_errored_instances_from_context + + +class SelectInvalidAction(pyblish.api.Action): + """Select invalid objects in Blender when a publish plug-in failed.""" + label = "Select Invalid" + on = "failed" + icon = "search" + + def process(self, context, plugin): + errored_instances = get_errored_instances_from_context(context, + plugin=plugin) + + # Get the invalid nodes for the plug-ins + self.log.info("Finding invalid nodes...") + invalid = list() + for instance in errored_instances: + invalid_nodes = plugin.get_invalid(instance) + if invalid_nodes: + if isinstance(invalid_nodes, (list, tuple)): + invalid.extend(invalid_nodes) + else: + self.log.warning( 
+ "Failed plug-in doesn't have any selectable objects." + ) + + if not invalid: + self.log.info("No invalid nodes found.") + return + invalid_names = [obj.name for obj in invalid if not isinstance(obj, tuple)] + if not invalid_names: + invalid_names = [obj.name for obj, _ in invalid] + invalid = [obj for obj, _ in invalid] + self.log.info( + "Selecting invalid objects: %s", ", ".join(invalid_names) + ) + + rt.Select(invalid) diff --git a/client/ayon_core/hosts/max/api/pipeline.py b/client/ayon_core/hosts/max/api/pipeline.py index ff5ef0640b..1486f7218d 100644 --- a/client/ayon_core/hosts/max/api/pipeline.py +++ b/client/ayon_core/hosts/max/api/pipeline.py @@ -12,6 +12,7 @@ from ayon_core.pipeline import ( register_creator_plugin_path, register_loader_plugin_path, AVALON_CONTAINER_ID, + AYON_CONTAINER_ID, ) from ayon_core.hosts.max.api.menu import OpenPypeMenu from ayon_core.hosts.max.api import lib @@ -60,9 +61,11 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): rt.callbacks.addScript(rt.Name('filePostOpen'), lib.check_colorspace) - def has_unsaved_changes(self): - # TODO: how to get it from 3dsmax? - return True + rt.callbacks.addScript(rt.Name('postWorkspaceChange'), + self._deferred_menu_creation) + + def workfile_has_unsaved_changes(self): + return rt.getSaveRequired() def get_workfile_extensions(self): return [".max"] @@ -149,7 +152,9 @@ def ls() -> list: objs = rt.objects containers = [ obj for obj in objs - if rt.getUserProp(obj, "id") == AVALON_CONTAINER_ID + if rt.getUserProp(obj, "id") in { + AYON_CONTAINER_ID, AVALON_CONTAINER_ID + } ] for container in sorted(containers, key=attrgetter("name")): @@ -240,3 +245,27 @@ def get_previous_loaded_object(container: str): if str(obj) in sel_list: node_list.append(obj) return node_list + + +def remove_container_data(container_node: str): + """Function to remove container data after updating, switching or deleting it. 
+ + Args: + container_node (str): container node + """ + if container_node.modifiers[0].name == "OP Data": + all_set_members_names = [ + member.node for member + in container_node.modifiers[0].openPypeData.all_handles] + # clean up the children of alembic dummy objects + for current_set_member in all_set_members_names: + shape_list = [members for members in current_set_member.Children + if rt.ClassOf(members) == rt.AlembicObject + or rt.isValidNode(members)] + if shape_list: # noqa + rt.Delete(shape_list) + rt.Delete(current_set_member) + rt.deleteModifier(container_node, container_node.modifiers[0]) + + rt.Delete(container_node) + rt.redrawViews() diff --git a/client/ayon_core/hosts/max/api/plugin.py b/client/ayon_core/hosts/max/api/plugin.py index 3551450c24..4d5d18a42d 100644 --- a/client/ayon_core/hosts/max/api/plugin.py +++ b/client/ayon_core/hosts/max/api/plugin.py @@ -6,7 +6,13 @@ import six from pymxs import runtime as rt from ayon_core.lib import BoolDef -from ayon_core.pipeline import CreatedInstance, Creator, CreatorError +from ayon_core.pipeline import ( + CreatedInstance, + Creator, + CreatorError, + AYON_INSTANCE_ID, + AVALON_INSTANCE_ID, +) from .lib import imprint, lsattr, read @@ -157,19 +163,23 @@ class OpenPypeCreatorError(CreatorError): class MaxCreatorBase(object): @staticmethod - def cache_subsets(shared_data): - if shared_data.get("max_cached_subsets") is not None: + def cache_instance_data(shared_data): + if shared_data.get("max_cached_instances") is not None: return shared_data - shared_data["max_cached_subsets"] = {} - cached_instances = lsattr("id", "pyblish.avalon.instance") + shared_data["max_cached_instances"] = {} + + cached_instances = [] + for id_type in [AYON_INSTANCE_ID, AVALON_INSTANCE_ID]: + cached_instances.extend(lsattr("id", id_type)) + for i in cached_instances: creator_id = rt.GetUserProp(i, "creator_identifier") - if creator_id not in shared_data["max_cached_subsets"]: - shared_data["max_cached_subsets"][creator_id] = [i.name] + if creator_id not in shared_data["max_cached_instances"]: + shared_data["max_cached_instances"][creator_id] = [i.name] else: shared_data[ - "max_cached_subsets"][creator_id].append(i.name) + "max_cached_instances"][creator_id].append(i.name) return shared_data @staticmethod @@ -201,17 +211,17 @@ class MaxCreatorBase(object): class MaxCreator(Creator, MaxCreatorBase): selected_nodes = [] - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): if pre_create_data.get("use_selection"): self.selected_nodes = rt.GetCurrentSelection() - if rt.getNodeByName(subset_name): - raise CreatorError(f"'{subset_name}' is already created..") + if rt.getNodeByName(product_name): + raise CreatorError(f"'{product_name}' is already created..") - instance_node = self.create_instance_node(subset_name) + instance_node = self.create_instance_node(product_name) instance_data["instance_node"] = instance_node.name instance = CreatedInstance( - self.family, - subset_name, + self.product_type, + product_name, instance_data, self ) @@ -238,8 +248,8 @@ class MaxCreator(Creator, MaxCreatorBase): return instance def collect_instances(self): - self.cache_subsets(self.collection_shared_data) - for instance in self.collection_shared_data["max_cached_subsets"].get(self.identifier, []): # noqa + self.cache_instance_data(self.collection_shared_data) + for instance in self.collection_shared_data["max_cached_instances"].get(self.identifier, []): # noqa created_instance = 
CreatedInstance.from_existing( read(rt.GetNodeByName(instance)), self ) @@ -252,15 +262,15 @@ class MaxCreator(Creator, MaxCreatorBase): key: changes[key].new_value for key in changes.changed_keys } - subset = new_values.get("subset", "") - if subset and instance_node != subset: + product_name = new_values.get("productName", "") + if product_name and instance_node != product_name: node = rt.getNodeByName(instance_node) - new_subset_name = new_values["subset"] - if rt.getNodeByName(new_subset_name): + new_product_name = new_values["productName"] + if rt.getNodeByName(new_product_name): raise CreatorError( - "The subset '{}' already exists.".format( - new_subset_name)) - instance_node = new_subset_name + "The product '{}' already exists.".format( + new_product_name)) + instance_node = new_product_name created_inst["instance_node"] = instance_node node.name = instance_node diff --git a/client/ayon_core/hosts/max/hooks/set_paths.py b/client/ayon_core/hosts/max/hooks/set_paths.py index c18fd29295..0ee1b0dab7 100644 --- a/client/ayon_core/hosts/max/hooks/set_paths.py +++ b/client/ayon_core/hosts/max/hooks/set_paths.py @@ -10,7 +10,7 @@ class SetPath(PreLaunchHook): launch_types = {LaunchTypes.local} def execute(self): - workdir = self.launch_context.env.get("AVALON_WORKDIR", "") + workdir = self.launch_context.env.get("AYON_WORKDIR", "") if not workdir: self.log.warning("BUG: Workdir is not filled.") return diff --git a/client/ayon_core/hosts/max/plugins/create/create_camera.py b/client/ayon_core/hosts/max/plugins/create/create_camera.py index a35d5fc6b9..42f8cb716d 100644 --- a/client/ayon_core/hosts/max/plugins/create/create_camera.py +++ b/client/ayon_core/hosts/max/plugins/create/create_camera.py @@ -7,5 +7,5 @@ class CreateCamera(plugin.MaxCreator): """Creator plugin for Camera.""" identifier = "io.openpype.creators.max.camera" label = "Camera" - family = "camera" + product_type = "camera" icon = "gear" diff --git a/client/ayon_core/hosts/max/plugins/create/create_maxScene.py b/client/ayon_core/hosts/max/plugins/create/create_maxScene.py index 4b8328d38f..0e5768b267 100644 --- a/client/ayon_core/hosts/max/plugins/create/create_maxScene.py +++ b/client/ayon_core/hosts/max/plugins/create/create_maxScene.py @@ -7,5 +7,5 @@ class CreateMaxScene(plugin.MaxCreator): """Creator plugin for 3ds max scenes.""" identifier = "io.openpype.creators.max.maxScene" label = "Max Scene" - family = "maxScene" + product_type = "maxScene" icon = "gear" diff --git a/client/ayon_core/hosts/max/plugins/create/create_model.py b/client/ayon_core/hosts/max/plugins/create/create_model.py index 73f0260807..297c92067e 100644 --- a/client/ayon_core/hosts/max/plugins/create/create_model.py +++ b/client/ayon_core/hosts/max/plugins/create/create_model.py @@ -7,5 +7,5 @@ class CreateModel(plugin.MaxCreator): """Creator plugin for Model.""" identifier = "io.openpype.creators.max.model" label = "Model" - family = "model" + product_type = "model" icon = "gear" diff --git a/client/ayon_core/hosts/max/plugins/create/create_pointcache.py b/client/ayon_core/hosts/max/plugins/create/create_pointcache.py index d28f5008e5..eb0686a0c0 100644 --- a/client/ayon_core/hosts/max/plugins/create/create_pointcache.py +++ b/client/ayon_core/hosts/max/plugins/create/create_pointcache.py @@ -7,5 +7,5 @@ class CreatePointCache(plugin.MaxCreator): """Creator plugin for Point caches.""" identifier = "io.openpype.creators.max.pointcache" label = "Point Cache" - family = "pointcache" + product_type = "pointcache" icon = "gear" diff --git 
a/client/ayon_core/hosts/max/plugins/create/create_pointcloud.py b/client/ayon_core/hosts/max/plugins/create/create_pointcloud.py index aa6be04da4..9a58f4e624 100644 --- a/client/ayon_core/hosts/max/plugins/create/create_pointcloud.py +++ b/client/ayon_core/hosts/max/plugins/create/create_pointcloud.py @@ -7,5 +7,5 @@ class CreatePointCloud(plugin.MaxCreator): """Creator plugin for Point Clouds.""" identifier = "io.openpype.creators.max.pointcloud" label = "Point Cloud" - family = "pointcloud" + product_type = "pointcloud" icon = "gear" diff --git a/client/ayon_core/hosts/max/plugins/create/create_redshift_proxy.py b/client/ayon_core/hosts/max/plugins/create/create_redshift_proxy.py index e524e85cf6..17f5349dc1 100644 --- a/client/ayon_core/hosts/max/plugins/create/create_redshift_proxy.py +++ b/client/ayon_core/hosts/max/plugins/create/create_redshift_proxy.py @@ -1,11 +1,10 @@ # -*- coding: utf-8 -*- """Creator plugin for creating camera.""" from ayon_core.hosts.max.api import plugin -from ayon_core.pipeline import CreatedInstance class CreateRedshiftProxy(plugin.MaxCreator): identifier = "io.openpype.creators.max.redshiftproxy" label = "Redshift Proxy" - family = "redshiftproxy" + product_type = "redshiftproxy" icon = "gear" diff --git a/client/ayon_core/hosts/max/plugins/create/create_render.py b/client/ayon_core/hosts/max/plugins/create/create_render.py index 73c18bfb4b..60fe628a5e 100644 --- a/client/ayon_core/hosts/max/plugins/create/create_render.py +++ b/client/ayon_core/hosts/max/plugins/create/create_render.py @@ -10,10 +10,10 @@ class CreateRender(plugin.MaxCreator): """Creator plugin for Renders.""" identifier = "io.openpype.creators.max.render" label = "Render" - family = "maxrender" + product_type = "maxrender" icon = "gear" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): from pymxs import runtime as rt file = rt.maxFileName filename, _ = os.path.splitext(file) @@ -24,7 +24,7 @@ class CreateRender(plugin.MaxCreator): rt.batchRenderMgr.DeleteView(num_of_renderlayer) instance = super(CreateRender, self).create( - subset_name, + product_name, instance_data, pre_create_data) diff --git a/client/ayon_core/hosts/max/plugins/create/create_review.py b/client/ayon_core/hosts/max/plugins/create/create_review.py index a757b3b5bd..0a0ffd2e46 100644 --- a/client/ayon_core/hosts/max/plugins/create/create_review.py +++ b/client/ayon_core/hosts/max/plugins/create/create_review.py @@ -9,7 +9,7 @@ class CreateReview(plugin.MaxCreator): identifier = "io.openpype.creators.max.review" label = "Review" - family = "review" + product_type = "review" icon = "video-camera" review_width = 1920 @@ -38,7 +38,7 @@ class CreateReview(plugin.MaxCreator): "anti_aliasing", self.anti_aliasing) self.vp_texture = settings.get("vp_texture", self.vp_texture) - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): # Transfer settings from pre create to instance creator_attributes = instance_data.setdefault( "creator_attributes", dict()) @@ -55,7 +55,7 @@ class CreateReview(plugin.MaxCreator): creator_attributes[key] = pre_create_data[key] super(CreateReview, self).create( - subset_name, + product_name, instance_data, pre_create_data) diff --git a/client/ayon_core/hosts/max/plugins/create/create_tycache.py b/client/ayon_core/hosts/max/plugins/create/create_tycache.py index 81ccd3607c..2b3893bf13 100644 --- 
a/client/ayon_core/hosts/max/plugins/create/create_tycache.py +++ b/client/ayon_core/hosts/max/plugins/create/create_tycache.py @@ -7,5 +7,5 @@ class CreateTyCache(plugin.MaxCreator): """Creator plugin for TyCache.""" identifier = "io.openpype.creators.max.tycache" label = "TyCache" - family = "tycache" + product_type = "tycache" icon = "gear" diff --git a/client/ayon_core/hosts/max/plugins/create/create_workfile.py b/client/ayon_core/hosts/max/plugins/create/create_workfile.py new file mode 100644 index 0000000000..1552149413 --- /dev/null +++ b/client/ayon_core/hosts/max/plugins/create/create_workfile.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating workfiles.""" +from ayon_core.pipeline import CreatedInstance, AutoCreator +from ayon_core.client import get_asset_by_name, get_asset_name_identifier +from ayon_core.hosts.max.api import plugin +from ayon_core.hosts.max.api.lib import read, imprint +from pymxs import runtime as rt + + +class CreateWorkfile(plugin.MaxCreatorBase, AutoCreator): + """Workfile auto-creator.""" + identifier = "io.ayon.creators.max.workfile" + label = "Workfile" + product_type = "workfile" + icon = "fa5.file" + + default_variant = "Main" + + def create(self): + variant = self.default_variant + current_instance = next( + ( + instance for instance in self.create_context.instances + if instance.creator_identifier == self.identifier + ), None) + project_name = self.project_name + asset_name = self.create_context.get_current_asset_name() + task_name = self.create_context.get_current_task_name() + host_name = self.create_context.host_name + + if current_instance is None: + asset_doc = get_asset_by_name(project_name, asset_name) + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + variant, + host_name, + ) + data = { + "folderPath": asset_name, + "task": task_name, + "variant": variant + } + + data.update( + self.get_dynamic_data( + project_name, + asset_doc, + task_name, + variant, + host_name, + current_instance) + ) + self.log.info("Auto-creating workfile instance...") + instance_node = self.create_node(product_name) + data["instance_node"] = instance_node.name + current_instance = CreatedInstance( + self.product_type, product_name, data, self + ) + self._add_instance_to_context(current_instance) + imprint(instance_node.name, current_instance.data) + elif ( + current_instance["folderPath"] != asset_name + or current_instance["task"] != task_name + ): + # Update instance context if is not the same + asset_doc = get_asset_by_name(project_name, asset_name) + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + variant, + host_name, + ) + asset_name = get_asset_name_identifier(asset_doc) + + current_instance["folderPath"] = asset_name + current_instance["task"] = task_name + current_instance["productName"] = product_name + + def collect_instances(self): + self.cache_instance_data(self.collection_shared_data) + cached_instances = self.collection_shared_data["max_cached_instances"] + for instance in cached_instances.get(self.identifier, []): + if not rt.getNodeByName(instance): + continue + created_instance = CreatedInstance.from_existing( + read(rt.GetNodeByName(instance)), self + ) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + for created_inst, _ in update_list: + instance_node = created_inst.get("instance_node") + imprint( + instance_node, + created_inst.data_to_store() + ) + + def remove_instances(self, instances): + """Remove 
specified instance from the scene. + + This is only removing `id` parameter so instance is no longer + instance, because it might contain valuable data for artist. + + """ + for instance in instances: + instance_node = rt.GetNodeByName( + instance.data.get("instance_node")) + if instance_node: + rt.Delete(instance_node) + + self._remove_instance_from_context(instance) + + def create_node(self, product_name): + if rt.getNodeByName(product_name): + node = rt.getNodeByName(product_name) + return node + node = rt.Container(name=product_name) + node.isHidden = True + return node diff --git a/client/ayon_core/hosts/max/plugins/load/load_camera_fbx.py b/client/ayon_core/hosts/max/plugins/load/load_camera_fbx.py index 34b120c179..d56445c695 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_camera_fbx.py +++ b/client/ayon_core/hosts/max/plugins/load/load_camera_fbx.py @@ -1,6 +1,6 @@ import os -from ayon_core.hosts.max.api import lib, maintained_selection +from ayon_core.hosts.max.api import lib from ayon_core.hosts.max.api.lib import ( unique_namespace, get_namespace, @@ -9,7 +9,8 @@ from ayon_core.hosts.max.api.lib import ( from ayon_core.hosts.max.api.pipeline import ( containerise, get_previous_loaded_object, - update_custom_attribute_data + update_custom_attribute_data, + remove_container_data ) from ayon_core.pipeline import get_representation_path, load @@ -50,10 +51,11 @@ class FbxLoader(load.LoaderPlugin): name, selections, context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) node_name = container["instance_node"] node = rt.getNodeByName(node_name) namespace, _ = get_namespace(node_name) @@ -86,14 +88,14 @@ class FbxLoader(load.LoaderPlugin): update_custom_attribute_data(node, fbx_objects) lib.imprint(container["instance_node"], { - "representation": str(representation["_id"]) + "representation": str(repre_doc["_id"]) }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt node = rt.GetNodeByName(container["instance_node"]) - rt.Delete(node) + remove_container_data(node) diff --git a/client/ayon_core/hosts/max/plugins/load/load_max_scene.py b/client/ayon_core/hosts/max/plugins/load/load_max_scene.py index 7267d7a59e..39bb3b568d 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_max_scene.py +++ b/client/ayon_core/hosts/max/plugins/load/load_max_scene.py @@ -8,7 +8,8 @@ from ayon_core.hosts.max.api.lib import ( ) from ayon_core.hosts.max.api.pipeline import ( containerise, get_previous_loaded_object, - update_custom_attribute_data + update_custom_attribute_data, + remove_container_data ) from ayon_core.pipeline import get_representation_path, load @@ -47,10 +48,11 @@ class MaxSceneLoader(load.LoaderPlugin): name, max_container, context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) node_name = container["instance_node"] node = rt.getNodeByName(node_name) namespace, _ = get_namespace(node_name) @@ -85,14 +87,13 @@ class 
MaxSceneLoader(load.LoaderPlugin): update_custom_attribute_data(node, max_objects) lib.imprint(container["instance_node"], { - "representation": str(representation["_id"]) + "representation": str(repre_doc["_id"]) }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt - node = rt.GetNodeByName(container["instance_node"]) - rt.Delete(node) + remove_container_data(node) diff --git a/client/ayon_core/hosts/max/plugins/load/load_model.py b/client/ayon_core/hosts/max/plugins/load/load_model.py index 796e1b80ad..e0241bdb73 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model.py @@ -2,7 +2,8 @@ import os from ayon_core.pipeline import load, get_representation_path from ayon_core.hosts.max.api.pipeline import ( containerise, - get_previous_loaded_object + get_previous_loaded_object, + remove_container_data ) from ayon_core.hosts.max.api import lib from ayon_core.hosts.max.api.lib import ( @@ -69,10 +70,11 @@ class ModelAbcLoader(load.LoaderPlugin): namespace, loader=self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) node = rt.GetNodeByName(container["instance_node"]) node_list = [n for n in get_previous_loaded_object(node) if rt.ClassOf(n) == rt.AlembicContainer] @@ -89,17 +91,17 @@ class ModelAbcLoader(load.LoaderPlugin): abc_obj.source = path lib.imprint( container["instance_node"], - {"representation": str(representation["_id"])}, + {"representation": str(repre_doc["_id"])}, ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt - node = rt.GetNodeByName(container["instance_node"]) - rt.Delete(node) + remove_container_data(node) + @staticmethod def get_container_children(parent, type_name): diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py b/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py index 827cf63b39..03ba901b32 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py @@ -2,7 +2,8 @@ import os from ayon_core.pipeline import load, get_representation_path from ayon_core.hosts.max.api.pipeline import ( containerise, get_previous_loaded_object, - update_custom_attribute_data + update_custom_attribute_data, + remove_container_data ) from ayon_core.hosts.max.api import lib from ayon_core.hosts.max.api.lib import ( @@ -46,10 +47,11 @@ class FbxModelLoader(load.LoaderPlugin): name, selections, context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) node_name = container["instance_node"] node = rt.getNodeByName(node_name) if not node: @@ -84,14 +86,13 @@ class FbxModelLoader(load.LoaderPlugin): rt.Select(node) update_custom_attribute_data(node, fbx_objects) lib.imprint(container["instance_node"], { - "representation": 
str(representation["_id"]) + "representation": str(repre_doc["_id"]) }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt - node = rt.GetNodeByName(container["instance_node"]) - rt.Delete(node) + remove_container_data(node) diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_obj.py b/client/ayon_core/hosts/max/plugins/load/load_model_obj.py index 22d3d4b58a..a6c3d2a2fe 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_obj.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_obj.py @@ -11,7 +11,8 @@ from ayon_core.hosts.max.api.lib import maintained_selection from ayon_core.hosts.max.api.pipeline import ( containerise, get_previous_loaded_object, - update_custom_attribute_data + update_custom_attribute_data, + remove_container_data ) from ayon_core.pipeline import get_representation_path, load @@ -46,10 +47,11 @@ class ObjLoader(load.LoaderPlugin): name, selections, context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) node_name = container["instance_node"] node = rt.getNodeByName(node_name) namespace, _ = get_namespace(node_name) @@ -76,14 +78,13 @@ class ObjLoader(load.LoaderPlugin): rt.Select(node) lib.imprint(node_name, { - "representation": str(representation["_id"]) + "representation": str(repre_doc["_id"]) }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt - node = rt.GetNodeByName(container["instance_node"]) - rt.Delete(node) + remove_container_data(node) diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_usd.py b/client/ayon_core/hosts/max/plugins/load/load_model_usd.py index 8d42219217..6673a2e48b 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_usd.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_usd.py @@ -13,7 +13,8 @@ from ayon_core.hosts.max.api.lib import maintained_selection from ayon_core.hosts.max.api.pipeline import ( containerise, get_previous_loaded_object, - update_custom_attribute_data + update_custom_attribute_data, + remove_container_data ) from ayon_core.pipeline import get_representation_path, load @@ -64,8 +65,9 @@ class ModelUSDLoader(load.LoaderPlugin): name, usd_objects, context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): - path = get_representation_path(representation) + def update(self, container, context): + repre_doc = context["representation"] + path = get_representation_path(repre_doc) node_name = container["instance_node"] node = rt.GetNodeByName(node_name) namespace, name = get_namespace(node_name) @@ -106,12 +108,13 @@ class ModelUSDLoader(load.LoaderPlugin): rt.Select(node) lib.imprint(node_name, { - "representation": str(representation["_id"]) + "representation": str(repre_doc["_id"]) }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): + from pymxs import runtime as rt node = 
rt.GetNodeByName(container["instance_node"]) - rt.Delete(node) + remove_container_data(node) diff --git a/client/ayon_core/hosts/max/plugins/load/load_pointcache.py b/client/ayon_core/hosts/max/plugins/load/load_pointcache.py index a92fa66757..6f79caea42 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_pointcache.py +++ b/client/ayon_core/hosts/max/plugins/load/load_pointcache.py @@ -10,7 +10,8 @@ from ayon_core.hosts.max.api import lib, maintained_selection from ayon_core.hosts.max.api.lib import unique_namespace from ayon_core.hosts.max.api.pipeline import ( containerise, - get_previous_loaded_object + get_previous_loaded_object, + remove_container_data ) @@ -75,10 +76,11 @@ class AbcLoader(load.LoaderPlugin): namespace, loader=self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) node = rt.GetNodeByName(container["instance_node"]) abc_container = [n for n in get_previous_loaded_object(node) if rt.ClassOf(n) == rt.AlembicContainer] @@ -95,17 +97,17 @@ class AbcLoader(load.LoaderPlugin): abc_obj.source = path lib.imprint( container["instance_node"], - {"representation": str(representation["_id"])}, + {"representation": str(repre_doc["_id"])}, ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt - node = rt.GetNodeByName(container["instance_node"]) - rt.Delete(node) + remove_container_data(node) + @staticmethod def get_container_children(parent, type_name): diff --git a/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py b/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py index 27b2e271d2..67d1374266 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py +++ b/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py @@ -4,7 +4,8 @@ from ayon_core.pipeline.load import LoadError from ayon_core.hosts.max.api.pipeline import ( containerise, get_previous_loaded_object, - update_custom_attribute_data + update_custom_attribute_data, + remove_container_data ) from ayon_core.hosts.max.api.lib import ( @@ -61,8 +62,9 @@ class OxAbcLoader(load.LoaderPlugin): namespace, loader=self.__class__.__name__ ) - def update(self, container, representation): - path = get_representation_path(representation) + def update(self, container, context): + repre_doc = context["representation"] + path = get_representation_path(repre_doc) node_name = container["instance_node"] namespace, name = get_namespace(node_name) node = rt.getNodeByName(node_name) @@ -97,12 +99,13 @@ class OxAbcLoader(load.LoaderPlugin): update_custom_attribute_data(node, ox_abc_objects) lib.imprint( container["instance_node"], - {"representation": str(representation["_id"])}, + {"representation": str(repre_doc["_id"])}, ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): + from pymxs import runtime as rt node = rt.GetNodeByName(container["instance_node"]) - rt.Delete(node) + remove_container_data(node) diff --git a/client/ayon_core/hosts/max/plugins/load/load_pointcloud.py b/client/ayon_core/hosts/max/plugins/load/load_pointcloud.py index 
45e3da5621..894648ff23 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_pointcloud.py +++ b/client/ayon_core/hosts/max/plugins/load/load_pointcloud.py @@ -8,7 +8,8 @@ from ayon_core.hosts.max.api.lib import ( from ayon_core.hosts.max.api.pipeline import ( containerise, get_previous_loaded_object, - update_custom_attribute_data + update_custom_attribute_data, + remove_container_data ) from ayon_core.pipeline import get_representation_path, load @@ -40,11 +41,12 @@ class PointCloudLoader(load.LoaderPlugin): name, [obj], context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): """update the container""" from pymxs import runtime as rt - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) node = rt.GetNodeByName(container["instance_node"]) node_list = get_previous_loaded_object(node) update_custom_attribute_data( @@ -54,15 +56,14 @@ class PointCloudLoader(load.LoaderPlugin): for prt in rt.Selection: prt.filename = path lib.imprint(container["instance_node"], { - "representation": str(representation["_id"]) + "representation": str(repre_doc["_id"]) }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): """remove the container""" from pymxs import runtime as rt - node = rt.GetNodeByName(container["instance_node"]) - rt.Delete(node) + remove_container_data(node) diff --git a/client/ayon_core/hosts/max/plugins/load/load_redshift_proxy.py b/client/ayon_core/hosts/max/plugins/load/load_redshift_proxy.py index 3f73210c24..7395a6eca5 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_redshift_proxy.py +++ b/client/ayon_core/hosts/max/plugins/load/load_redshift_proxy.py @@ -9,7 +9,8 @@ from ayon_core.pipeline.load import LoadError from ayon_core.hosts.max.api.pipeline import ( containerise, update_custom_attribute_data, - get_previous_loaded_object + get_previous_loaded_object, + remove_container_data ) from ayon_core.hosts.max.api import lib from ayon_core.hosts.max.api.lib import ( @@ -51,10 +52,11 @@ class RedshiftProxyLoader(load.LoaderPlugin): name, [rs_proxy], context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) node = rt.getNodeByName(container["instance_node"]) node_list = get_previous_loaded_object(node) rt.Select(node_list) @@ -64,14 +66,13 @@ class RedshiftProxyLoader(load.LoaderPlugin): proxy.file = path lib.imprint(container["instance_node"], { - "representation": str(representation["_id"]) + "representation": str(repre_doc["_id"]) }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt - - node = rt.getNodeByName(container["instance_node"]) - rt.delete(node) + node = rt.GetNodeByName(container["instance_node"]) + remove_container_data(node) diff --git a/client/ayon_core/hosts/max/plugins/load/load_tycache.py b/client/ayon_core/hosts/max/plugins/load/load_tycache.py index 48fb5c447a..5acc759b4a 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_tycache.py +++ 
b/client/ayon_core/hosts/max/plugins/load/load_tycache.py @@ -7,7 +7,8 @@ from ayon_core.hosts.max.api.lib import ( from ayon_core.hosts.max.api.pipeline import ( containerise, get_previous_loaded_object, - update_custom_attribute_data + update_custom_attribute_data, + remove_container_data ) from ayon_core.pipeline import get_representation_path, load @@ -38,11 +39,12 @@ class TyCacheLoader(load.LoaderPlugin): name, [obj], context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): """update the container""" from pymxs import runtime as rt - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) node = rt.GetNodeByName(container["instance_node"]) node_list = get_previous_loaded_object(node) update_custom_attribute_data(node, node_list) @@ -50,15 +52,14 @@ class TyCacheLoader(load.LoaderPlugin): for tyc in node_list: tyc.filename = path lib.imprint(container["instance_node"], { - "representation": str(representation["_id"]) + "representation": str(repre_doc["_id"]) }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): """remove the container""" from pymxs import runtime as rt - node = rt.GetNodeByName(container["instance_node"]) - rt.Delete(node) + remove_container_data(node) diff --git a/client/ayon_core/hosts/max/plugins/publish/collect_current_file.py b/client/ayon_core/hosts/max/plugins/publish/collect_current_file.py new file mode 100644 index 0000000000..6f8b8dda4b --- /dev/null +++ b/client/ayon_core/hosts/max/plugins/publish/collect_current_file.py @@ -0,0 +1,23 @@ +import os +import pyblish.api + +from pymxs import runtime as rt + + +class CollectCurrentFile(pyblish.api.ContextPlugin): + """Inject the current working file.""" + + order = pyblish.api.CollectorOrder - 0.5 + label = "Max Current File" + hosts = ['max'] + + def process(self, context): + """Inject the current working file""" + folder = rt.maxFilePath + file = rt.maxFileName + if not folder or not file: + self.log.error("Scene is not saved.") + current_file = os.path.join(folder, file) + + context.data["currentFile"] = current_file + self.log.debug("Scene path: {}".format(current_file)) diff --git a/client/ayon_core/hosts/max/plugins/publish/collect_frame_range.py b/client/ayon_core/hosts/max/plugins/publish/collect_frame_range.py index 86fb6e856c..6fc8de90d1 100644 --- a/client/ayon_core/hosts/max/plugins/publish/collect_frame_range.py +++ b/client/ayon_core/hosts/max/plugins/publish/collect_frame_range.py @@ -14,7 +14,7 @@ class CollectFrameRange(pyblish.api.InstancePlugin): "review", "redshiftproxy"] def process(self, instance): - if instance.data["family"] == "maxrender": + if instance.data["productType"] == "maxrender": instance.data["frameStartHandle"] = int(rt.rendStart) instance.data["frameEndHandle"] = int(rt.rendEnd) else: diff --git a/client/ayon_core/hosts/max/plugins/publish/collect_members.py b/client/ayon_core/hosts/max/plugins/publish/collect_members.py index 2970cf0e24..010b3cd3e1 100644 --- a/client/ayon_core/hosts/max/plugins/publish/collect_members.py +++ b/client/ayon_core/hosts/max/plugins/publish/collect_members.py @@ -12,7 +12,11 @@ class CollectMembers(pyblish.api.InstancePlugin): hosts = ['max'] def process(self, instance): - + if instance.data["productType"] == "workfile": + self.log.debug( + "Skipping 
Collecting Members for workfile product type." + ) + return if instance.data.get("instance_node"): container = rt.GetNodeByName(instance.data["instance_node"]) instance.data["members"] = [ diff --git a/client/ayon_core/hosts/max/plugins/publish/collect_render.py b/client/ayon_core/hosts/max/plugins/publish/collect_render.py index a97e8a154e..4ad9dfb3a3 100644 --- a/client/ayon_core/hosts/max/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/max/plugins/publish/collect_render.py @@ -92,15 +92,17 @@ class CollectRender(pyblish.api.InstancePlugin): instance.data["attachTo"] = [] renderer_class = get_current_renderer() renderer = str(renderer_class).split(":")[0] + product_type = "maxrender" # also need to get the render dir for conversion data = { - "asset": instance.data["asset"], - "subset": str(instance.name), + "folderPath": instance.data["folderPath"], + "productName": str(instance.name), "publish": True, "maxversion": str(get_max_version()), "imageFormat": img_format, - "family": 'maxrender', - "families": ['maxrender'], + "productType": product_type, + "family": product_type, + "families": [product_type], "renderer": renderer, "source": filepath, "plugin": "3dsmax", diff --git a/client/ayon_core/hosts/max/plugins/publish/collect_workfile.py b/client/ayon_core/hosts/max/plugins/publish/collect_workfile.py index 0eb4bb731e..6eec0f7292 100644 --- a/client/ayon_core/hosts/max/plugins/publish/collect_workfile.py +++ b/client/ayon_core/hosts/max/plugins/publish/collect_workfile.py @@ -6,57 +6,41 @@ import pyblish.api from pymxs import runtime as rt -class CollectWorkfile(pyblish.api.ContextPlugin): +class CollectWorkfile(pyblish.api.InstancePlugin): """Inject the current working file into context""" order = pyblish.api.CollectorOrder - 0.01 label = "Collect 3dsmax Workfile" hosts = ['max'] + families = ["workfile"] - def process(self, context): + def process(self, instance): """Inject the current working file.""" + context = instance.context folder = rt.maxFilePath file = rt.maxFileName if not folder or not file: self.log.error("Scene is not saved.") - current_file = os.path.join(folder, file) - - context.data['currentFile'] = current_file - - filename, ext = os.path.splitext(file) - - task = context.data["task"] + ext = os.path.splitext(file)[-1].lstrip(".") data = {} - # create instance - instance = context.create_instance(name=filename) - subset = 'workfile' + task.capitalize() - data.update({ - "subset": subset, - "asset": context.data["asset"], - "label": subset, - "publish": True, - "family": 'workfile', - "families": ['workfile'], - "setMembers": [current_file], - "frameStart": context.data['frameStart'], - "frameEnd": context.data['frameEnd'], - "handleStart": context.data['handleStart'], - "handleEnd": context.data['handleEnd'] + "setMembers": context.data["currentFile"], + "frameStart": context.data["frameStart"], + "frameEnd": context.data["frameEnd"], + "handleStart": context.data["handleStart"], + "handleEnd": context.data["handleEnd"] }) - data['representations'] = [{ - 'name': ext.lstrip("."), - 'ext': ext.lstrip("."), - 'files': file, + data["representations"] = [{ + "name": ext, + "ext": ext, + "files": file, "stagingDir": folder, }] instance.data.update(data) - - self.log.info('Collected instance: {}'.format(file)) - self.log.info('Scene path: {}'.format(current_file)) - self.log.info('staging Dir: {}'.format(folder)) - self.log.info('subset: {}'.format(subset)) + self.log.debug("Collected data: {}".format(data)) + self.log.debug("Collected instance: 
{}".format(file)) + self.log.debug("staging Dir: {}".format(folder)) diff --git a/client/ayon_core/hosts/max/plugins/publish/extract_pointcloud.py b/client/ayon_core/hosts/max/plugins/publish/extract_pointcloud.py index 294d63794e..67dde7f0a6 100644 --- a/client/ayon_core/hosts/max/plugins/publish/extract_pointcloud.py +++ b/client/ayon_core/hosts/max/plugins/publish/extract_pointcloud.py @@ -155,7 +155,9 @@ class ExtractPointCloud(publish.Extractor): custom_attr_list = [] attr_settings = self.settings["attribute"] - for key, value in attr_settings.items(): + for attr in attr_settings: + key = attr["name"] + value = attr["value"] custom_attr = "{0}.PRTChannels_{1}=True".format(operator, value) self.log.debug( diff --git a/client/ayon_core/hosts/max/plugins/publish/save_scene.py b/client/ayon_core/hosts/max/plugins/publish/save_scene.py index a40788ab41..1c59335ceb 100644 --- a/client/ayon_core/hosts/max/plugins/publish/save_scene.py +++ b/client/ayon_core/hosts/max/plugins/publish/save_scene.py @@ -1,11 +1,9 @@ import pyblish.api -import os +from ayon_core.pipeline import registered_host class SaveCurrentScene(pyblish.api.ContextPlugin): - """Save current scene - - """ + """Save current scene""" label = "Save current file" order = pyblish.api.ExtractorOrder - 0.49 @@ -13,9 +11,13 @@ class SaveCurrentScene(pyblish.api.ContextPlugin): families = ["maxrender", "workfile"] def process(self, context): - from pymxs import runtime as rt - folder = rt.maxFilePath - file = rt.maxFileName - current = os.path.join(folder, file) - assert context.data["currentFile"] == current - rt.saveMaxFile(current) + host = registered_host() + current_file = host.get_current_workfile() + + assert context.data["currentFile"] == current_file + + if host.workfile_has_unsaved_changes(): + self.log.info(f"Saving current file: {current_file}") + host.save_workfile(current_file) + else: + self.log.debug("No unsaved changes, skipping file save..") \ No newline at end of file diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_attributes.py b/client/ayon_core/hosts/max/plugins/publish/validate_attributes.py index 444a8f0829..354539871f 100644 --- a/client/ayon_core/hosts/max/plugins/publish/validate_attributes.py +++ b/client/ayon_core/hosts/max/plugins/publish/validate_attributes.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- """Validator for Attributes.""" +import json + from pyblish.api import ContextPlugin, ValidatorOrder from pymxs import runtime as rt @@ -61,9 +63,13 @@ class ValidateAttributes(OptionalPyblishPluginMixin, @classmethod def get_invalid(cls, context): - attributes = ( - context.data["project_settings"]["max"]["publish"] - ["ValidateAttributes"]["attributes"] + attributes = json.loads( + context.data + ["project_settings"] + ["max"] + ["publish"] + ["ValidateAttributes"] + ["attributes"] ) if not attributes: return @@ -112,9 +118,13 @@ class ValidateAttributes(OptionalPyblishPluginMixin, @classmethod def repair(cls, context): - attributes = ( - context.data["project_settings"]["max"]["publish"] - ["ValidateAttributes"]["attributes"] + attributes = json.loads( + context.data + ["project_settings"] + ["max"] + ["publish"] + ["ValidateAttributes"] + ["attributes"] ) invalid_attributes = cls.get_invalid(context) for attrs in invalid_attributes: diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_camera_attributes.py b/client/ayon_core/hosts/max/plugins/publish/validate_camera_attributes.py new file mode 100644 index 0000000000..9398cba2b7 --- /dev/null +++ 
b/client/ayon_core/hosts/max/plugins/publish/validate_camera_attributes.py @@ -0,0 +1,88 @@ +import pyblish.api +from pymxs import runtime as rt + +from ayon_core.pipeline.publish import ( + RepairAction, + OptionalPyblishPluginMixin, + PublishValidationError +) +from ayon_core.hosts.max.api.action import SelectInvalidAction + + +class ValidateCameraAttributes(OptionalPyblishPluginMixin, + pyblish.api.InstancePlugin): + """Validates that the camera has no invalid attribute properties + or values (for 3dsMax cameras only). + + """ + + order = pyblish.api.ValidatorOrder + families = ['camera'] + hosts = ['max'] + label = 'Validate Camera Attributes' + actions = [SelectInvalidAction, RepairAction] + optional = True + + DEFAULTS = ["fov", "nearrange", "farrange", + "nearclip", "farclip"] + CAM_TYPE = ["Freecamera", "Targetcamera", + "Physical"] + + @classmethod + def get_invalid(cls, instance): + invalid = [] + if rt.units.DisplayType != rt.Name("Generic"): + cls.log.warning( + "Generic Type is not used as a scene unit.\n\n" + "Make sure you tweak the settings with your own values\n\n" + "before validation.") + cameras = instance.data["members"] + project_settings = instance.context.data["project_settings"].get("max") + cam_attr_settings = ( + project_settings["publish"]["ValidateCameraAttributes"] + ) + for camera in cameras: + if str(rt.ClassOf(camera)) not in cls.CAM_TYPE: + cls.log.debug( + "Skipping camera created from external plugin..") + continue + for attr in cls.DEFAULTS: + default_value = cam_attr_settings.get(attr) + if default_value == float(0): + cls.log.debug( + f"The value of {attr} in the settings is set to" + " zero. Skipping the check.") + continue + if round(rt.getProperty(camera, attr), 1) != default_value: + cls.log.error( + f"Invalid attribute value for {camera.name}:{attr} " + f"(should be: {default_value})") + invalid.append(camera) + + return invalid + + def process(self, instance): + if not self.is_active(instance.data): + self.log.debug("Skipping Validate Camera Attributes.") + return + invalid = self.get_invalid(instance) + + if invalid: + raise PublishValidationError( + "Invalid camera attributes found. 
See log.") + + @classmethod + def repair(cls, instance): + invalid_cameras = cls.get_invalid(instance) + project_settings = instance.context.data["project_settings"].get("max") + cam_attr_settings = ( + project_settings["publish"]["ValidateCameraAttributes"] + ) + for camera in invalid_cameras: + for attr in cls.DEFAULTS: + expected_value = cam_attr_settings.get(attr) + if expected_value == float(0): + cls.log.debug( + f"the value of {attr} in setting set to zero.") + continue + rt.setProperty(camera, attr, expected_value) diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_frame_range.py b/client/ayon_core/hosts/max/plugins/publish/validate_frame_range.py index 75a83c2b05..22fda37e61 100644 --- a/client/ayon_core/hosts/max/plugins/publish/validate_frame_range.py +++ b/client/ayon_core/hosts/max/plugins/publish/validate_frame_range.py @@ -81,7 +81,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin, frame_start_handle = frame_range["frameStartHandle"] frame_end_handle = frame_range["frameEndHandle"] - if instance.data["family"] == "maxrender": + if instance.data["productType"] == "maxrender": rt.rendStart = frame_start_handle rt.rendEnd = frame_end_handle else: diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py b/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py new file mode 100644 index 0000000000..963a601009 --- /dev/null +++ b/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +"""Validate if instance context is the same as current context.""" +import pyblish.api +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) +from ayon_core.hosts.max.api.action import SelectInvalidAction +from pymxs import runtime as rt + + +class ValidateInstanceInContext(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validator to check if instance context match current context. + + When working in per-shot style you always publish data in context of + current context (shot). This validator checks if this is so. It is optional + so it can be disabled when needed. + + Action on this validator will select invalid instances. + """ + order = ValidateContentsOrder + label = "Instance in same Context" + optional = True + hosts = ["max"] + actions = [SelectInvalidAction, RepairAction] + + def process(self, instance): + if not self.is_active(instance.data): + return + + folderPath = instance.data.get("folderPath") + task = instance.data.get("task") + context = self.get_context(instance) + if (folderPath, task) != context: + context_label = "{} > {}".format(*context) + instance_label = "{} > {}".format(folderPath, task) + message = ( + "Instance '{}' publishes to different context than current " + "context: {}. Current context: {}".format( + instance.name, instance_label, context_label + ) + ) + raise PublishValidationError( + message=message, + description=( + "## Publishing to a different context data\n" + "There are publish instances present which are publishing " + "into a different folder path or task than your current context.\n\n" + "Usually this is not what you want but there can be cases " + "where you might want to publish into another context or " + "shot. If that's the case you can disable the validation " + "on the instance to ignore it." 
+ ) + ) + + @classmethod + def get_invalid(cls, instance): + invalid = [] + folderPath = instance.data.get("folderPath") + task = instance.data.get("task") + context = cls.get_context(instance) + if (folderPath, task) != context: + invalid.append(rt.getNodeByName(instance.name)) + return invalid + + @classmethod + def repair(cls, instance): + context_asset = instance.context.data["folderPath"] + context_task = instance.context.data["task"] + instance_node = rt.getNodeByName(instance.data.get( + "instance_node", "")) + if not instance_node: + return + rt.SetUserProp(instance_node, "folderPath", context_asset) + rt.SetUserProp(instance_node, "task", context_task) + + @staticmethod + def get_context(instance): + """Return asset, task from publishing context data""" + context = instance.context + return context.data["folderPath"], context.data["task"] diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_loaded_plugin.py b/client/ayon_core/hosts/max/plugins/publish/validate_loaded_plugin.py index fe6e32b27b..e278041b6b 100644 --- a/client/ayon_core/hosts/max/plugins/publish/validate_loaded_plugin.py +++ b/client/ayon_core/hosts/max/plugins/publish/validate_loaded_plugin.py @@ -25,7 +25,7 @@ class ValidateLoadedPlugin(OptionalPyblishPluginMixin, optional = True actions = [RepairAction] - family_plugins_mapping = {} + family_plugins_mapping = [] @classmethod def get_invalid(cls, instance): @@ -34,9 +34,15 @@ class ValidateLoadedPlugin(OptionalPyblishPluginMixin, if not family_plugins_mapping: return + # Backward compatibility - settings did have 'product_types' + if "product_types" in family_plugins_mapping: + family_plugins_mapping["families"] = family_plugins_mapping.pop( + "product_types" + ) + invalid = [] # Find all plug-in requirements for current instance - instance_families = {instance.data["family"]} + instance_families = {instance.data["productType"]} instance_families.update(instance.data.get("families", [])) cls.log.debug("Checking plug-in validation " f"for instance families: {instance_families}") @@ -47,7 +53,9 @@ class ValidateLoadedPlugin(OptionalPyblishPluginMixin, if not mapping: return - match_families = {fam.strip() for fam in mapping["families"]} + match_families = { + fam.strip() for fam in mapping["families"] + } has_match = "*" in match_families or match_families.intersection( instance_families) diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_mesh_has_uv.py b/client/ayon_core/hosts/max/plugins/publish/validate_mesh_has_uv.py new file mode 100644 index 0000000000..109b7fe0b5 --- /dev/null +++ b/client/ayon_core/hosts/max/plugins/publish/validate_mesh_has_uv.py @@ -0,0 +1,58 @@ + +import pyblish.api +from ayon_core.hosts.max.api.action import SelectInvalidAction +from ayon_core.pipeline.publish import ( + ValidateMeshOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) +from pymxs import runtime as rt + + +class ValidateMeshHasUVs(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + + """Validate the current mesh has UVs. + + This validator only checks if the mesh has UVs but not + whether all the individual faces of the mesh have UVs. + + It validates whether the current mesh has texture vertices. + If the mesh does not have texture vertices, it does not + have UVs in Max. 
+ + """ + + order = ValidateMeshOrder + hosts = ['max'] + families = ['model'] + label = 'Validate Mesh Has UVs' + actions = [SelectInvalidAction] + optional = True + + @classmethod + def get_invalid(cls, instance): + meshes = [member for member in instance.data["members"] + if rt.isProperty(member, "mesh")] + invalid = [member for member in meshes + if member.mesh.numTVerts == 0] + return invalid + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + bullet_point_invalid_statement = "\n".join( + "- {}".format(invalid.name) for invalid + in invalid + ) + report = ( + "Model meshes are required to have UVs.\n\n" + "Meshes detected with invalid or missing UVs:\n" + f"{bullet_point_invalid_statement}\n" + ) + raise PublishValidationError( + report, + description=( + "Model meshes are required to have UVs.\n\n" + "Meshes detected with no texture vertice or missing UVs"), + title="Non-mesh objects found or mesh has missing UVs") diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_pointcloud.py b/client/ayon_core/hosts/max/plugins/publish/validate_pointcloud.py index a025ed3992..73b18984ed 100644 --- a/client/ayon_core/hosts/max/plugins/publish/validate_pointcloud.py +++ b/client/ayon_core/hosts/max/plugins/publish/validate_pointcloud.py @@ -59,7 +59,9 @@ class ValidatePointCloud(pyblish.api.InstancePlugin): event_name = sub_anim.name opt = "${0}.{1}.export_particles".format(sel.name, event_name) - for key, value in attr_settings.items(): + for attr in attr_settings: + key = attr["name"] + value = attr["value"] custom_attr = "{0}.PRTChannels_{1}".format(opt, value) try: diff --git a/client/ayon_core/hosts/max/startup/startup.ms b/client/ayon_core/hosts/max/startup/startup.ms index b80ead4b74..4c597901f3 100644 --- a/client/ayon_core/hosts/max/startup/startup.ms +++ b/client/ayon_core/hosts/max/startup/startup.ms @@ -8,5 +8,8 @@ local pythonpath = systemTools.getEnvVariable "MAX_PYTHONPATH" systemTools.setEnvVariable "PYTHONPATH" pythonpath + /*opens the create menu on startup to ensure users are presented with a useful default view.*/ + max create mode + python.ExecuteFile startup ) \ No newline at end of file diff --git a/client/ayon_core/hosts/maya/addon.py b/client/ayon_core/hosts/maya/addon.py index 745850f6a8..c68aa4c911 100644 --- a/client/ayon_core/hosts/maya/addon.py +++ b/client/ayon_core/hosts/maya/addon.py @@ -1,16 +1,13 @@ import os -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon MAYA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class MayaAddon(OpenPypeModule, IHostAddon): +class MayaAddon(AYONAddon, IHostAddon): name = "maya" host_name = "maya" - def initialize(self, module_settings): - self.enabled = True - def add_implementation_envs(self, env, _app): # Add requirements to PYTHONPATH new_python_paths = [ diff --git a/client/ayon_core/hosts/maya/api/action.py b/client/ayon_core/hosts/maya/api/action.py index 1edca82ee4..4beb1e3e5b 100644 --- a/client/ayon_core/hosts/maya/api/action.py +++ b/client/ayon_core/hosts/maya/api/action.py @@ -15,7 +15,7 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action): receive new UUIDs are actually invalid. Requires: - - instance.data["asset"] + - instance.data["folderPath"] """ @@ -78,7 +78,7 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action): # should be always available, but kept a way to query it by name. 
asset_doc = instance.data.get("assetEntity") if not asset_doc: - asset_name = instance.data["asset"] + asset_name = instance.data["folderPath"] project_name = instance.context.data["projectName"] self.log.info(( "Asset is not stored on instance." diff --git a/client/ayon_core/hosts/maya/api/commands.py b/client/ayon_core/hosts/maya/api/commands.py index b52d5e6c2d..f69dca97a8 100644 --- a/client/ayon_core/hosts/maya/api/commands.py +++ b/client/ayon_core/hosts/maya/api/commands.py @@ -38,25 +38,6 @@ class ToolWindows: cls._windows[tool] = window -def edit_shader_definitions(): - from qtpy import QtWidgets - from ayon_core.hosts.maya.api.shader_definition_editor import ( - ShaderDefinitionsEditor - ) - from ayon_core.tools.utils import qt_app_context - - top_level_widgets = QtWidgets.QApplication.topLevelWidgets() - main_window = next(widget for widget in top_level_widgets - if widget.objectName() == "MayaWindow") - - with qt_app_context(): - window = ToolWindows.get_window("shader_definition_editor") - if not window: - window = ShaderDefinitionsEditor(parent=main_window) - ToolWindows.set_window("shader_definition_editor", window) - window.show() - - def _resolution_from_document(doc): if not doc or "data" not in doc: print("Entered document is not valid. \"{}\"".format(str(doc))) diff --git a/client/ayon_core/hosts/maya/api/lib.py b/client/ayon_core/hosts/maya/api/lib.py index 7b791c3d51..1aa2244111 100644 --- a/client/ayon_core/hosts/maya/api/lib.py +++ b/client/ayon_core/hosts/maya/api/lib.py @@ -24,7 +24,8 @@ from ayon_core.client import ( get_asset_by_name, get_subsets, get_last_versions, - get_representation_by_name + get_representation_by_name, + get_asset_name_identifier, ) from ayon_core.settings import get_project_settings from ayon_core.pipeline import ( @@ -35,7 +36,11 @@ from ayon_core.pipeline import ( loaders_from_representation, get_representation_path, load_container, - registered_host + registered_host, + AVALON_CONTAINER_ID, + AVALON_INSTANCE_ID, + AYON_INSTANCE_ID, + AYON_CONTAINER_ID, ) from ayon_core.lib import NumberDef from ayon_core.pipeline.context_tools import get_current_project_asset @@ -329,7 +334,7 @@ def generate_capture_preset(instance, camera, path, # Update preset with current panel setting # if override_viewport_options is turned off - if not capture_preset["Viewport Options"]["override_viewport_options"]: + if not capture_preset["ViewportOptions"]["override_viewport_options"]: panel_preset = capture.parse_view(preset["panel"]) panel_preset.pop("camera") preset.update(panel_preset) @@ -1863,14 +1868,14 @@ def get_container_members(container): # region LOOKDEV def list_looks(project_name, asset_id): - """Return all look subsets for the given asset + """Return all look products for the given asset - This assumes all look subsets start with "look*" in their names. + This assumes all look products start with "look*" in their names. 
""" - # # get all subsets with look leading in + # # get all products with look leading in # the name associated with the asset - # TODO this should probably look for family 'look' instead of checking - # subset name that can not start with family + # TODO this should probably look for product type 'look' instead of + # checking product name that can not start with product type subset_docs = get_subsets(project_name, asset_ids=[asset_id]) return [ subset_doc @@ -1937,15 +1942,15 @@ def assign_look_by_version(nodes, version_id): apply_shaders(relationships, shader_nodes, nodes) -def assign_look(nodes, subset="lookDefault"): +def assign_look(nodes, product_name="lookDefault"): """Assigns a look to a node. Optimizes the nodes by grouping by asset id and finding - related subset by name. + related product by name. Args: nodes (list): all nodes to assign the look to - subset (str): name of the subset to find + product_name (str): name of the product to find """ # Group all nodes per asset id @@ -1960,22 +1965,22 @@ def assign_look(nodes, subset="lookDefault"): project_name = get_current_project_name() subset_docs = get_subsets( - project_name, subset_names=[subset], asset_ids=grouped.keys() + project_name, subset_names=[product_name], asset_ids=grouped.keys() ) subset_docs_by_asset_id = { str(subset_doc["parent"]): subset_doc for subset_doc in subset_docs } - subset_ids = { + product_ids = { subset_doc["_id"] for subset_doc in subset_docs_by_asset_id.values() } last_version_docs = get_last_versions( project_name, - subset_ids=subset_ids, + subset_ids=product_ids, fields=["_id", "name", "data.families"] ) - last_version_docs_by_subset_id = { + last_version_docs_by_product_id = { last_version_doc["parent"]: last_version_doc for last_version_doc in last_version_docs } @@ -1984,26 +1989,28 @@ def assign_look(nodes, subset="lookDefault"): # create objectId for database subset_doc = subset_docs_by_asset_id.get(asset_id) if not subset_doc: - log.warning("No subset '{}' found for {}".format(subset, asset_id)) + log.warning(( + "No product '{}' found for {}" + ).format(product_name, asset_id)) continue - last_version = last_version_docs_by_subset_id.get(subset_doc["_id"]) + last_version = last_version_docs_by_product_id.get(subset_doc["_id"]) if not last_version: log.warning(( - "Not found last version for subset '{}' on asset with id {}" - ).format(subset, asset_id)) + "Not found last version for product '{}' on asset with id {}" + ).format(product_name, asset_id)) continue families = last_version.get("data", {}).get("families") or [] if "look" not in families: log.warning(( - "Last version for subset '{}' on asset with id {}" - " does not have look family" - ).format(subset, asset_id)) + "Last version for product '{}' on asset with id {}" + " does not have look product type" + ).format(product_name, asset_id)) continue log.debug("Assigning look '{}' ".format( - subset, last_version["name"])) + product_name, last_version["name"])) assign_look_by_version(asset_nodes, last_version["_id"]) @@ -2100,7 +2107,7 @@ def get_related_sets(node): """Return objectSets that are relationships for a look for `node`. 
Filters out based on: - - id attribute is NOT `pyblish.avalon.container` + - id attribute is NOT `AVALON_CONTAINER_ID` - shapes and deformer shapes (alembic creates meshShapeDeformed) - set name ends with any from a predefined list - set in not in viewport set (isolate selected for example) @@ -2120,7 +2127,12 @@ def get_related_sets(node): defaults = {"defaultLightSet", "defaultObjectSet"} # Ids to ignore - ignored = {"pyblish.avalon.instance", "pyblish.avalon.container"} + ignored = { + AVALON_INSTANCE_ID, + AVALON_CONTAINER_ID, + AYON_INSTANCE_ID, + AYON_CONTAINER_ID, + } view_sets = get_isolate_view_sets() @@ -2937,14 +2949,15 @@ def load_capture_preset(data): options.update(data["Generic"]) options.update(data["Resolution"]) - camera_options.update(data['Camera Options']) + camera_options.update(data["CameraOptions"]) viewport_options.update(data["Renderer"]) # DISPLAY OPTIONS disp_options = {} - for key, value in data['Display Options'].items(): - if key.startswith('background'): + for key, value in data["DisplayOptions"].items(): + if key.startswith("background"): # Convert background, backgroundTop, backgroundBottom colors + if len(value) == 4: # Ignore alpha + convert RGB to float value = [ @@ -2956,7 +2969,7 @@ def load_capture_preset(data): elif key == "displayGradient": disp_options[key] = value - options['display_options'] = disp_options + options["display_options"] = disp_options # Viewport Options has a mixture of Viewport2 Options and Viewport Options # to pass along to capture. So we'll need to differentiate between the two @@ -2981,7 +2994,7 @@ def load_capture_preset(data): "motionBlurShutterOpenFraction", "lineAAEnable" } - for key, value in data['Viewport Options'].items(): + for key, value in data["ViewportOptions"].items(): # There are some keys we want to ignore if key in {"override_viewport_options", "high_quality"}: @@ -3140,136 +3153,29 @@ def fix_incompatible_containers(): "ReferenceLoader", type="string") -def _null(*args): - pass - - -class shelf(): - '''A simple class to build shelves in maya. Since the build method is empty, - it should be extended by the derived class to build the necessary shelf - elements. By default it creates an empty shelf called "customShelf".''' - - ########################################################################### - '''This is an example shelf.''' - # class customShelf(_shelf): - # def build(self): - # self.addButon(label="button1") - # self.addButon("button2") - # self.addButon("popup") - # p = cmds.popupMenu(b=1) - # self.addMenuItem(p, "popupMenuItem1") - # self.addMenuItem(p, "popupMenuItem2") - # sub = self.addSubMenu(p, "subMenuLevel1") - # self.addMenuItem(sub, "subMenuLevel1Item1") - # sub2 = self.addSubMenu(sub, "subMenuLevel2") - # self.addMenuItem(sub2, "subMenuLevel2Item1") - # self.addMenuItem(sub2, "subMenuLevel2Item2") - # self.addMenuItem(sub, "subMenuLevel1Item2") - # self.addMenuItem(p, "popupMenuItem3") - # self.addButon("button3") - # customShelf() - ########################################################################### - - def __init__(self, name="customShelf", iconPath="", preset={}): - self.name = name - - self.iconPath = iconPath - - self.labelBackground = (0, 0, 0, 0) - self.labelColour = (.9, .9, .9) - - self.preset = preset - - self._cleanOldShelf() - cmds.setParent(self.name) - self.build() - - def build(self): - '''This method should be overwritten in derived classes to actually - build the shelf elements. 
Otherwise, nothing is added to the shelf.''' - for item in self.preset['items']: - if not item.get('command'): - item['command'] = self._null - if item['type'] == 'button': - self.addButon(item['name'], - command=item['command'], - icon=item['icon']) - if item['type'] == 'menuItem': - self.addMenuItem(item['parent'], - item['name'], - command=item['command'], - icon=item['icon']) - if item['type'] == 'subMenu': - self.addMenuItem(item['parent'], - item['name'], - command=item['command'], - icon=item['icon']) - - def addButon(self, label, icon="commandButton.png", - command=_null, doubleCommand=_null): - ''' - Adds a shelf button with the specified label, command, - double click command and image. - ''' - cmds.setParent(self.name) - if icon: - icon = os.path.join(self.iconPath, icon) - print(icon) - cmds.shelfButton(width=37, height=37, image=icon, label=label, - command=command, dcc=doubleCommand, - imageOverlayLabel=label, olb=self.labelBackground, - olc=self.labelColour) - - def addMenuItem(self, parent, label, command=_null, icon=""): - ''' - Adds a shelf button with the specified label, command, - double click command and image. - ''' - if icon: - icon = os.path.join(self.iconPath, icon) - print(icon) - return cmds.menuItem(p=parent, label=label, c=command, i="") - - def addSubMenu(self, parent, label, icon=None): - ''' - Adds a sub menu item with the specified label and icon to - the specified parent popup menu. - ''' - if icon: - icon = os.path.join(self.iconPath, icon) - print(icon) - return cmds.menuItem(p=parent, label=label, i=icon, subMenu=1) - - def _cleanOldShelf(self): - ''' - Checks if the shelf exists and empties it if it does - or creates it if it does not. - ''' - if cmds.shelfLayout(self.name, ex=1): - if cmds.shelfLayout(self.name, q=1, ca=1): - for each in cmds.shelfLayout(self.name, q=1, ca=1): - cmds.deleteUI(each) - else: - cmds.shelfLayout(self.name, p="ShelfLayout") - - def update_content_on_context_change(): """ - This will update scene content to match new asset on context change + This will update scene content to match new folder on context change """ scene_sets = cmds.listSets(allSets=True) asset_doc = get_current_project_asset() - new_asset = asset_doc["name"] + new_folder_path = get_asset_name_identifier(asset_doc) new_data = asset_doc["data"] for s in scene_sets: try: - if cmds.getAttr("{}.id".format(s)) == "pyblish.avalon.instance": + if cmds.getAttr("{}.id".format(s)) in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: attr = cmds.listAttr(s) print(s) - if "asset" in attr: - print(" - setting asset to: [ {} ]".format(new_asset)) - cmds.setAttr("{}.asset".format(s), - new_asset, type="string") + if "folderPath" in attr: + print( + " - setting folder to: [ {} ]".format(new_folder_path) + ) + cmds.setAttr( + "{}.folderPath".format(s), + new_folder_path, type="string" + ) if "frameStart" in attr: cmds.setAttr("{}.frameStart".format(s), new_data["frameStart"]) @@ -4051,7 +3957,9 @@ def get_all_children(nodes): return list(traversed) -def get_capture_preset(task_name, task_type, subset, project_settings, log): +def get_capture_preset( + task_name, task_type, product_name, project_settings, log +): """Get capture preset for playblasting. Logic for transitioning from old style capture preset to new capture preset @@ -4059,18 +3967,16 @@ def get_capture_preset(task_name, task_type, subset, project_settings, log): Args: task_name (str): Task name. - take_type (str): Task type. - subset (str): Subset name. + task_type (str): Task type. 
+ product_name (str): Subset name. project_settings (dict): Project settings. - log (object): Logging object. + log (logging.Logger): Logging object. """ capture_preset = None filtering_criteria = { - "hosts": "maya", - "families": "review", "task_names": task_name, "task_types": task_type, - "subset": subset + "product_names": product_name } plugin_settings = project_settings["maya"]["publish"]["ExtractPlayblast"] @@ -4091,8 +3997,18 @@ def get_capture_preset(task_name, task_type, subset, project_settings, log): "Falling back to deprecated Extract Playblast capture preset " "because no new style playblast profiles are defined." ) - capture_preset = plugin_settings["capture_preset"] + capture_preset = plugin_settings.get("capture_preset") + if capture_preset: + # Create deepcopy of preset as we'll change the values + capture_preset = copy.deepcopy(capture_preset) + + viewport_options = capture_preset["ViewportOptions"] + # Change 'list' to 'dict' for 'capture.py' + viewport_options["pluginObjects"] = { + item["name"]: item["value"] + for item in viewport_options["pluginObjects"] + } return capture_preset or {} @@ -4203,8 +4119,8 @@ def create_rig_animation_instance( ) assert roots, "No root nodes in rig, this is a bug." - custom_subset = options.get("animationSubsetName") - if custom_subset: + custom_product_name = options.get("animationProductName") + if custom_product_name: formatting_data = { "asset": context["asset"], "subset": context['subset']['name'], @@ -4214,13 +4130,11 @@ def create_rig_animation_instance( ) } namespace = get_custom_namespace( - custom_subset.format( - **formatting_data - ) + custom_product_name.format(**formatting_data) ) if log: - log.info("Creating subset: {}".format(namespace)) + log.info("Creating product: {}".format(namespace)) # Fill creator identifier creator_identifier = "io.openpype.creators.maya.animation" diff --git a/client/ayon_core/hosts/maya/api/lib_rendersettings.py b/client/ayon_core/hosts/maya/api/lib_rendersettings.py index 54ee7888b3..b8a4d04a10 100644 --- a/client/ayon_core/hosts/maya/api/lib_rendersettings.py +++ b/client/ayon_core/hosts/maya/api/lib_rendersettings.py @@ -46,7 +46,7 @@ class RenderSettings(object): project_settings = get_project_settings( get_current_project_name() ) - render_settings = project_settings["maya"]["RenderSettings"] + render_settings = project_settings["maya"]["render_settings"] image_prefixes = { "vray": render_settings["vray_renderer"]["image_prefix"], "arnold": render_settings["arnold_renderer"]["image_prefix"], @@ -82,12 +82,12 @@ class RenderSettings(object): try: aov_separator = self._aov_chars[( self._project_settings["maya"] - ["RenderSettings"] + ["render_settings"] ["aov_separator"] )] except KeyError: aov_separator = "_" - reset_frame = self._project_settings["maya"]["RenderSettings"]["reset_current_frame"] # noqa + reset_frame = self._project_settings["maya"]["render_settings"]["reset_current_frame"] # noqa if reset_frame: start_frame = cmds.getAttr("defaultRenderGlobals.startFrame") @@ -131,7 +131,7 @@ class RenderSettings(object): import maya.mel as mel # noqa: F401 createOptions() - render_settings = self._project_settings["maya"]["RenderSettings"] + render_settings = self._project_settings["maya"]["render_settings"] arnold_render_presets = render_settings["arnold_renderer"] # noqa # Force resetting settings and AOV list to avoid having to deal with # AOV checking logic, for now. 
@@ -180,7 +180,7 @@ class RenderSettings(object): from maya import cmds # noqa: F401 import maya.mel as mel # noqa: F401 - render_settings = self._project_settings["maya"]["RenderSettings"] + render_settings = self._project_settings["maya"]["render_settings"] redshift_render_presets = render_settings["redshift_renderer"] remove_aovs = render_settings["remove_aovs"] @@ -239,7 +239,7 @@ class RenderSettings(object): rman_render_presets = ( self._project_settings ["maya"] - ["RenderSettings"] + ["render_settings"] ["renderman_renderer"] ) display_filters = rman_render_presets["display_filters"] @@ -304,7 +304,7 @@ class RenderSettings(object): settings = cmds.ls(type="VRaySettingsNode") node = settings[0] if settings else cmds.createNode("VRaySettingsNode") - render_settings = self._project_settings["maya"]["RenderSettings"] + render_settings = self._project_settings["maya"]["render_settings"] vray_render_presets = render_settings["vray_renderer"] # vrayRenderElement remove_aovs = render_settings["remove_aovs"] @@ -390,7 +390,8 @@ class RenderSettings(object): import maya.mel as mel # noqa: F401 for item in additional_attribs: - attribute, value = item + attribute = item["attribute"] + value = item["value"] attribute = str(attribute) # ensure str conversion from settings attribute_type = cmds.getAttr(attribute, type=True) if attribute_type in {"long", "bool"}: diff --git a/client/ayon_core/hosts/maya/api/menu.py b/client/ayon_core/hosts/maya/api/menu.py index 7478739496..70347e91b6 100644 --- a/client/ayon_core/hosts/maya/api/menu.py +++ b/client/ayon_core/hosts/maya/api/menu.py @@ -9,7 +9,8 @@ import maya.cmds as cmds from ayon_core.pipeline import ( get_current_asset_name, - get_current_task_name + get_current_task_name, + registered_host ) from ayon_core.pipeline.workfile import BuildWorkfile from ayon_core.tools.utils import host_tools @@ -21,8 +22,10 @@ from .workfile_template_builder import ( create_placeholder, update_placeholder, build_workfile_template, - update_workfile_template, + update_workfile_template ) +from ayon_core.tools.workfile_template_build import open_template_ui +from .workfile_template_builder import MayaTemplateBuilder log = logging.getLogger(__name__) @@ -167,16 +170,6 @@ def install(project_settings): tearOff=True, parent=MENU_NAME ) - cmds.menuItem( - "Create Placeholder", - parent=builder_menu, - command=create_placeholder - ) - cmds.menuItem( - "Update Placeholder", - parent=builder_menu, - command=update_placeholder - ) cmds.menuItem( "Build Workfile from template", parent=builder_menu, @@ -187,6 +180,27 @@ def install(project_settings): parent=builder_menu, command=update_workfile_template ) + cmds.menuItem( + divider=True, + parent=builder_menu + ) + cmds.menuItem( + "Open Template", + parent=builder_menu, + command=lambda *args: open_template_ui( + MayaTemplateBuilder(registered_host()), get_main_window() + ), + ) + cmds.menuItem( + "Create Placeholder", + parent=builder_menu, + command=create_placeholder + ) + cmds.menuItem( + "Update Placeholder", + parent=builder_menu, + command=update_placeholder + ) cmds.setParent(MENU_NAME, menu=True) diff --git a/client/ayon_core/hosts/maya/api/pipeline.py b/client/ayon_core/hosts/maya/api/pipeline.py index 95617cb90a..90fb2e5888 100644 --- a/client/ayon_core/hosts/maya/api/pipeline.py +++ b/client/ayon_core/hosts/maya/api/pipeline.py @@ -26,7 +26,6 @@ from ayon_core.lib import ( emit_event ) from ayon_core.pipeline import ( - legacy_io, get_current_project_name, register_loader_plugin_path, 
register_inventory_action_path, @@ -34,6 +33,7 @@ from ayon_core.pipeline import ( deregister_loader_plugin_path, deregister_inventory_action_path, deregister_creator_plugin_path, + AYON_CONTAINER_ID, AVALON_CONTAINER_ID, ) from ayon_core.pipeline.load import any_outdated_containers @@ -247,7 +247,7 @@ def _set_project(): None """ - workdir = legacy_io.Session["AVALON_WORKDIR"] + workdir = os.getenv("AYON_WORKDIR") try: os.makedirs(workdir) @@ -377,9 +377,11 @@ def _ls(): yield iterator.thisNode() iterator.next() - ids = {AVALON_CONTAINER_ID, - # Backwards compatibility - "pyblish.mindbender.container"} + ids = { + AYON_CONTAINER_ID, + # Backwards compatibility + AVALON_CONTAINER_ID + } # Iterate over all 'set' nodes in the scene to detect whether # they have the avalon container ".id" attribute. @@ -629,7 +631,7 @@ def on_task_changed(): # Run menu.update_menu_task_label() - workdir = legacy_io.Session["AVALON_WORKDIR"] + workdir = os.getenv("AYON_WORKDIR") if os.path.exists(workdir): log.info("Updating Maya workspace for task change to %s", workdir) _set_project() @@ -678,7 +680,7 @@ def workfile_save_before_xgen(event): import xgenm - current_work_dir = legacy_io.Session["AVALON_WORKDIR"].replace("\\", "/") + current_work_dir = os.getenv("AYON_WORKDIR").replace("\\", "/") expected_work_dir = event.data["workdir_path"].replace("\\", "/") if current_work_dir == expected_work_dir: return diff --git a/client/ayon_core/hosts/maya/api/plugin.py b/client/ayon_core/hosts/maya/api/plugin.py index c5e3f42d10..1151b0e248 100644 --- a/client/ayon_core/hosts/maya/api/plugin.py +++ b/client/ayon_core/hosts/maya/api/plugin.py @@ -10,6 +10,9 @@ from maya.app.renderSetup.model import renderSetup from ayon_core.lib import BoolDef, Logger from ayon_core.settings import get_project_settings from ayon_core.pipeline import ( + AYON_INSTANCE_ID, + AYON_CONTAINER_ID, + AVALON_INSTANCE_ID, AVALON_CONTAINER_ID, Anatomy, @@ -22,10 +25,11 @@ from ayon_core.pipeline import ( LegacyCreator, LoaderPlugin, get_representation_path, + get_current_project_name, ) from ayon_core.pipeline.load import LoadError from ayon_core.client import get_asset_by_name -from ayon_core.pipeline.create import get_subset_name +from ayon_core.pipeline.create import get_product_name from . import lib from .lib import imprint, read @@ -85,31 +89,30 @@ class Creator(LegacyCreator): class MayaCreatorBase(object): @staticmethod - def cache_subsets(shared_data): + def cache_instance_data(shared_data): """Cache instances for Creators to shared data. - Create `maya_cached_subsets` key when needed in shared data and + Create `maya_cached_instance_data` key when needed in shared data and fill it with all collected instances from the scene under its respective creator identifiers. If legacy instances are detected in the scene, create - `maya_cached_legacy_subsets` there and fill it with - all legacy subsets under family as a key. + `maya_cached_legacy_instances` there and fill it with + all legacy products under product type as a key. Args: Dict[str, Any]: Shared data. - Return: - Dict[str, Any]: Shared data dictionary. 
- """ - if shared_data.get("maya_cached_subsets") is None: + if shared_data.get("maya_cached_instance_data") is None: cache = dict() cache_legacy = dict() for node in cmds.ls(type="objectSet"): - if _get_attr(node, attr="id") != "pyblish.avalon.instance": + if _get_attr(node, attr="id") not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: continue creator_id = _get_attr(node, attr="creator_identifier") @@ -125,8 +128,8 @@ class MayaCreatorBase(object): cache_legacy.setdefault(family, []).append(node) - shared_data["maya_cached_subsets"] = cache - shared_data["maya_cached_legacy_subsets"] = cache_legacy + shared_data["maya_cached_instance_data"] = cache + shared_data["maya_cached_legacy_instances"] = cache_legacy return shared_data def get_publish_families(self): @@ -137,8 +140,7 @@ class MayaCreatorBase(object): specify `usd` but apply different extractors like `usdMultiverse`. There is no need to override this method if you only have the - primary family defined by the `family` property as that will always - be set. + 'product_type' required for publish filtering. Returns: list: families for instances of this creator @@ -159,7 +161,7 @@ class MayaCreatorBase(object): data.pop("families", None) # We store creator attributes at the root level and assume they - # will not clash in names with `subset`, `task`, etc. and other + # will not clash in names with `product`, `task`, etc. and other # default names. This is just so these attributes in many cases # are still editable in the maya UI by artists. # note: pop to move to end of dict to sort attributes last on the node @@ -238,9 +240,11 @@ class MayaCreatorBase(object): return node_data def _default_collect_instances(self): - self.cache_subsets(self.collection_shared_data) - cached_subsets = self.collection_shared_data["maya_cached_subsets"] - for node in cached_subsets.get(self.identifier, []): + self.cache_instance_data(self.collection_shared_data) + cached_instances = ( + self.collection_shared_data["maya_cached_instance_data"] + ) + for node in cached_instances.get(self.identifier, []): node_data = self.read_instance_node(node) created_instance = CreatedInstance.from_existing(node_data, self) @@ -273,7 +277,7 @@ class MayaCreator(NewCreator, MayaCreatorBase): settings_category = "maya" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): members = list() if pre_create_data.get("use_selection"): @@ -288,11 +292,11 @@ class MayaCreator(NewCreator, MayaCreatorBase): families.append(family) with lib.undo_chunk(): - instance_node = cmds.sets(members, name=subset_name) + instance_node = cmds.sets(members, name=product_name) instance_data["instance_node"] = instance_node instance = CreatedInstance( - self.family, - subset_name, + self.product_type, + product_name, instance_data, self) self._add_instance_to_context(instance) @@ -379,7 +383,7 @@ def ensure_namespace(namespace): class RenderlayerCreator(NewCreator, MayaCreatorBase): """Creator which creates an instance per renderlayer in the workfile. - Create and manages renderlayer subset per renderLayer in workfile. + Create and manages renderlayer product per renderLayer in workfile. This generates a singleton node in the scene which, if it exists, tells the Creator to collect Maya rendersetup renderlayers as individual instances. 
As such, triggering create doesn't actually create the instance node per @@ -399,7 +403,7 @@ class RenderlayerCreator(NewCreator, MayaCreatorBase): if nodes: return nodes if return_all else nodes[0] - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): # A Renderlayer is never explicitly created using the create method. # Instead, renderlayers from the scene are collected. Thus "create" # would only ever be called to say, 'hey, please refresh collect' @@ -437,6 +441,7 @@ class RenderlayerCreator(NewCreator, MayaCreatorBase): if not self._get_singleton_node(): return + host_name = self.create_context.host_name rs = renderSetup.instance() layers = rs.getRenderLayers() for layer in layers: @@ -456,15 +461,17 @@ class RenderlayerCreator(NewCreator, MayaCreatorBase): "variant": layer.name(), } asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - layer.name(), - instance_data["task"], + product_name = self.get_product_name( + project_name, asset_doc, - project_name) + instance_data["task"], + layer.name(), + host_name, + ) instance = CreatedInstance( - family=self.family, - subset_name=subset_name, + product_type=self.product_type, + product_name=product_name, data=instance_data, creator=self ) @@ -568,21 +575,64 @@ class RenderlayerCreator(NewCreator, MayaCreatorBase): if node and cmds.objExists(node): cmds.delete(node) - def get_subset_name( + def get_product_name( self, - variant, - task_name, - asset_doc, project_name, + asset_doc, + task_name, + variant, host_name=None, instance=None ): - # creator.family != 'render' as expected - return get_subset_name(self.layer_instance_prefix, - variant, - task_name, - asset_doc, - project_name) + if host_name is None: + host_name = self.create_context.host_name + dynamic_data = self.get_dynamic_data( + project_name, asset_doc, task_name, variant, host_name, instance + ) + # creator.product_type != 'render' as expected + return get_product_name( + project_name, + asset_doc, + task_name, + host_name, + self.layer_instance_prefix or self.product_type, + variant, + dynamic_data=dynamic_data, + project_settings=self.project_settings + ) + + +def get_load_color_for_product_type(product_type, settings=None): + """Get color for product type from settings. + + Args: + product_type (str): Family name. + settings (Optional[dict]): Settings dictionary. + + Returns: + Union[tuple[float, float, float], None]: RGB color. 
+ + """ + if settings is None: + settings = get_project_settings(get_current_project_name()) + + colors = settings["maya"]["load"]["colors"] + color = colors.get(product_type) + if not color: + return None + + if len(color) == 3: + red, green, blue = color + elif len(color) == 4: + red, green, blue, _ = color + else: + raise ValueError("Invalid color definition {}".format(str(color))) + + if isinstance(red, int): + red = red / 255.0 + green = green / 255.0 + blue = blue / 255.0 + return red, green, blue class Loader(LoaderPlugin): @@ -591,8 +641,8 @@ class Loader(LoaderPlugin): load_settings = {} # defined in settings @classmethod - def apply_settings(cls, project_settings, system_settings): - super(Loader, cls).apply_settings(project_settings, system_settings) + def apply_settings(cls, project_settings): + super(Loader, cls).apply_settings(project_settings) cls.load_settings = project_settings['maya']['load'] def get_custom_namespace_and_group(self, context, options, loader_key): @@ -611,33 +661,38 @@ class Loader(LoaderPlugin): options["attach_to_root"] = True custom_naming = self.load_settings[loader_key] - if not custom_naming['namespace']: + if not custom_naming["namespace"]: raise LoadError("No namespace specified in " "Maya ReferenceLoader settings") - elif not custom_naming['group_name']: + elif not custom_naming["group_name"]: self.log.debug("No custom group_name, no group will be created.") options["attach_to_root"] = False - asset = context['asset'] - subset = context['subset'] + asset_doc = context["asset"] + subset_doc = context["subset"] + product_type = ( + subset_doc["data"].get("family") + or subset_doc["data"]["families"][0] + ) formatting_data = { - "asset_name": asset['name'], - "asset_type": asset['type'], + "asset_name": asset_doc["name"], + "asset_type": asset_doc["type"], "folder": { - "name": asset["name"], + "name": asset_doc["name"], }, - "subset": subset['name'], - "family": ( - subset['data'].get('family') or - subset['data']['families'][0] - ) + "subset": subset_doc["name"], + "product": { + "name": subset_doc["name"], + "type": product_type, + }, + "family": product_type } - custom_namespace = custom_naming['namespace'].format( + custom_namespace = custom_naming["namespace"].format( **formatting_data ) - custom_group_name = custom_naming['group_name'].format( + custom_group_name = custom_naming["group_name"].format( **formatting_data ) @@ -701,7 +756,7 @@ class ReferenceLoader(Loader): options['group_name'] = group_name - # Offset loaded subset + # Offset loaded product if "offset" in options: offset = [i * c for i in options["offset"]] options["translate"] = offset @@ -738,14 +793,17 @@ class ReferenceLoader(Loader): """To be implemented by subclass""" raise NotImplementedError("Must be implemented by subclass") - def update(self, container, representation): + def update(self, container, context): from maya import cmds from ayon_core.hosts.maya.api.lib import get_container_members node = container["objectName"] - path = get_representation_path(representation) + project_name = context["project"]["name"] + repre_doc = context["representation"] + + path = get_representation_path(repre_doc) # Get reference node from container members members = get_container_members(node) @@ -758,9 +816,9 @@ class ReferenceLoader(Loader): "abc": "Alembic", "fbx": "FBX", "usd": "USD Import" - }.get(representation["name"]) + }.get(repre_doc["name"]) - assert file_type, "Unsupported representation: %s" % representation + assert file_type, "Unsupported representation: %s" % repre_doc 
assert os.path.exists(path), "%s does not exist." % path @@ -768,7 +826,7 @@ class ReferenceLoader(Loader): # them to incoming data. alembic_attrs = ["speed", "offset", "cycleType", "time"] alembic_data = {} - if representation["name"] == "abc": + if repre_doc["name"] == "abc": alembic_nodes = cmds.ls( "{}:*".format(namespace), type="AlembicNode" ) @@ -785,10 +843,7 @@ class ReferenceLoader(Loader): self.log.debug("No alembic nodes found in {}".format(members)) try: - path = self.prepare_root_value(path, - representation["context"] - ["project"] - ["name"]) + path = self.prepare_root_value(path, project_name) content = cmds.file(path, loadReference=reference_node, type=file_type, @@ -812,7 +867,7 @@ class ReferenceLoader(Loader): self._organize_containers(content, container["objectName"]) # Reapply alembic settings. - if representation["name"] == "abc" and alembic_data: + if repre_doc["name"] == "abc" and alembic_data: alembic_nodes = cmds.ls( "{}:*".format(namespace), type="AlembicNode" ) @@ -846,7 +901,7 @@ class ReferenceLoader(Loader): # Update metadata cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + str(repre_doc["_id"]), type="string") # When an animation or pointcache gets connected to an Xgen container, @@ -937,7 +992,7 @@ class ReferenceLoader(Loader): """ settings = get_project_settings(project_name) use_env_var_as_root = (settings["maya"] - ["maya-dirmap"] + ["maya_dirmap"] ["use_env_var_as_root"]) if use_env_var_as_root: anatomy = Anatomy(project_name) @@ -953,5 +1008,7 @@ class ReferenceLoader(Loader): id_attr = "{}.id".format(node) if not cmds.attributeQuery("id", node=node, exists=True): continue - if cmds.getAttr(id_attr) == AVALON_CONTAINER_ID: + if cmds.getAttr(id_attr) not in { + AYON_CONTAINER_ID, AVALON_CONTAINER_ID + }: cmds.sets(node, forceElement=container) diff --git a/client/ayon_core/hosts/maya/api/setdress.py b/client/ayon_core/hosts/maya/api/setdress.py index 7a1054cc49..913e12fd3f 100644 --- a/client/ayon_core/hosts/maya/api/setdress.py +++ b/client/ayon_core/hosts/maya/api/setdress.py @@ -315,16 +315,27 @@ def update_package_version(container, version): new_representation = get_representation_by_name( project_name, current_representation["name"], new_version["_id"] ) - - update_package(container, new_representation) + # TODO there is 'get_representation_context' to get the context which + # could be possible to use here + new_context = { + "project": { + "name": project_doc["name"], + "code": project_doc["data"].get("code", "") + }, + "asset": asset_doc, + "subset": subset_doc, + "version": version_doc, + "representation": new_representation, + } + update_package(container, new_context) -def update_package(set_container, representation): +def update_package(set_container, context): """Update any matrix changes in the scene based on the new data Args: set_container (dict): container data from `ls()` - representation (dict): the representation document from the database + context (dict): the representation document from the database Returns: None @@ -332,7 +343,8 @@ def update_package(set_container, representation): """ # Load the original package data - project_name = get_current_project_name() + project_name = context["project"]["name"] + repre_doc = context["representation"] current_representation = get_representation_by_id( project_name, set_container["representation"] ) @@ -343,7 +355,7 @@ def update_package(set_container, representation): current_data = json.load(fp) # Load the new package data - new_file = 
get_representation_path(representation) + new_file = get_representation_path(repre_doc) assert new_file.endswith(".json") with open(new_file, "r") as fp: new_data = json.load(fp) @@ -354,7 +366,7 @@ def update_package(set_container, representation): # TODO: This should be handled by the pipeline itself cmds.setAttr(set_container['objectName'] + ".representation", - str(representation['_id']), type="string") + str(repre_doc['_id']), type="string") def update_scene(set_container, containers, current_data, new_data, new_file): @@ -480,7 +492,7 @@ def update_scene(set_container, containers, current_data, new_data, new_file): continue # Check whether the conversion can be done by the Loader. - # They *must* use the same asset, subset and Loader for + # They *must* use the same asset, product and Loader for # `update_container` to make sense. old = get_representation_by_id( project_name, representation_current @@ -559,13 +571,14 @@ def compare_representations(old, new): new_context = new["context"] old_context = old["context"] + # TODO add better validation e.g. based on parent ids if new_context["asset"] != old_context["asset"]: log.error("Changing assets between updates is " "not supported.") return False if new_context["subset"] != old_context["subset"]: - log.error("Changing subsets between updates is " + log.error("Changing products between updates is " "not supported.") return False diff --git a/client/ayon_core/hosts/maya/api/shader_definition_editor.py b/client/ayon_core/hosts/maya/api/shader_definition_editor.py deleted file mode 100644 index 04e8dded6f..0000000000 --- a/client/ayon_core/hosts/maya/api/shader_definition_editor.py +++ /dev/null @@ -1,176 +0,0 @@ -# -*- coding: utf-8 -*- -"""Editor for shader definitions. - -Shader names are stored as simple text file over GridFS in mongodb. 
- -""" -import os -from qtpy import QtWidgets, QtCore, QtGui -from ayon_core.client.mongo import OpenPypeMongoConnection -from ayon_core import resources -import gridfs - - -DEFINITION_FILENAME = "{}/maya/shader_definition.txt".format( - os.getenv("AVALON_PROJECT")) - - -class ShaderDefinitionsEditor(QtWidgets.QWidget): - """Widget serving as simple editor for shader name definitions.""" - - # name of the file used to store definitions - - def __init__(self, parent=None): - super(ShaderDefinitionsEditor, self).__init__(parent) - self._mongo = OpenPypeMongoConnection.get_mongo_client() - self._gridfs = gridfs.GridFS( - self._mongo[os.getenv("OPENPYPE_DATABASE_NAME")]) - self._editor = None - - self._original_content = self._read_definition_file() - - self.setObjectName("shaderDefinitionEditor") - self.setWindowTitle("OpenPype shader name definition editor") - icon = QtGui.QIcon(resources.get_ayon_icon_filepath()) - self.setWindowIcon(icon) - self.setWindowFlags(QtCore.Qt.Window) - self.setParent(parent) - self.setAttribute(QtCore.Qt.WA_DeleteOnClose) - self.resize(750, 500) - - self._setup_ui() - self._reload() - - def _setup_ui(self): - """Setup UI of Widget.""" - layout = QtWidgets.QVBoxLayout(self) - label = QtWidgets.QLabel() - label.setText("Put shader names here - one name per line:") - layout.addWidget(label) - self._editor = QtWidgets.QPlainTextEdit() - self._editor.setStyleSheet("border: none;") - layout.addWidget(self._editor) - - btn_layout = QtWidgets.QHBoxLayout() - save_btn = QtWidgets.QPushButton("Save") - save_btn.clicked.connect(self._save) - - reload_btn = QtWidgets.QPushButton("Reload") - reload_btn.clicked.connect(self._reload) - - exit_btn = QtWidgets.QPushButton("Exit") - exit_btn.clicked.connect(self._close) - - btn_layout.addWidget(reload_btn) - btn_layout.addWidget(save_btn) - btn_layout.addWidget(exit_btn) - - layout.addLayout(btn_layout) - - def _read_definition_file(self, file=None): - """Read definition file from database. - - Args: - file (gridfs.grid_file.GridOut, Optional): File to read. If not - set, new query will be issued to find it. - - Returns: - str: Content of the file or empty string if file doesn't exist. - - """ - content = "" - if not file: - file = self._gridfs.find_one( - {"filename": DEFINITION_FILENAME}) - if not file: - print(">>> [SNDE]: nothing in database yet") - return content - content = file.read() - file.close() - return content - - def _write_definition_file(self, content, force=False): - """Write content as definition to file in database. - - Before file is written, check is made if its content has not - changed. If is changed, warning is issued to user if he wants - it to overwrite. Note: GridFs doesn't allow changing file content. - You need to delete existing file and create new one. - - Args: - content (str): Content to write. - - Raises: - ContentException: If file is changed in database while - editor is running. 
- """ - file = self._gridfs.find_one( - {"filename": DEFINITION_FILENAME}) - if file: - content_check = self._read_definition_file(file) - if content == content_check: - print(">>> [SNDE]: content not changed") - return - if self._original_content != content_check: - if not force: - raise ContentException("Content changed") - print(">>> [SNDE]: overwriting data") - file.close() - self._gridfs.delete(file._id) - - file = self._gridfs.new_file( - filename=DEFINITION_FILENAME, - content_type='text/plain', - encoding='utf-8') - file.write(content) - file.close() - QtCore.QTimer.singleShot(200, self._reset_style) - self._editor.setStyleSheet("border: 1px solid #33AF65;") - self._original_content = content - - def _reset_style(self): - """Reset editor style back. - - Used to visually indicate save. - - """ - self._editor.setStyleSheet("border: none;") - - def _close(self): - self.hide() - - def closeEvent(self, event): - event.ignore() - self.hide() - - def _reload(self): - print(">>> [SNDE]: reloading") - self._set_content(self._read_definition_file()) - - def _save(self): - try: - self._write_definition_file(content=self._editor.toPlainText()) - except ContentException: - # content has changed meanwhile - print(">>> [SNDE]: content has changed") - self._show_overwrite_warning() - - def _set_content(self, content): - self._editor.setPlainText(content) - - def _show_overwrite_warning(self): - reply = QtWidgets.QMessageBox.question( - self, - "Warning", - ("Content you are editing was changed meanwhile in database.\n" - "Please, reload and solve the conflict."), - QtWidgets.QMessageBox.OK) - - if reply == QtWidgets.QMessageBox.OK: - # do nothing - pass - - -class ContentException(Exception): - """This is risen during save if file is changed in database.""" - pass diff --git a/client/ayon_core/hosts/maya/api/workfile_template_builder.py b/client/ayon_core/hosts/maya/api/workfile_template_builder.py index c3c61e5444..6ae2a075e3 100644 --- a/client/ayon_core/hosts/maya/api/workfile_template_builder.py +++ b/client/ayon_core/hosts/maya/api/workfile_template_builder.py @@ -2,7 +2,12 @@ import json from maya import cmds -from ayon_core.pipeline import registered_host, get_current_asset_name +from ayon_core.pipeline import ( + registered_host, + get_current_asset_name, + AYON_INSTANCE_ID, + AVALON_INSTANCE_ID, +) from ayon_core.pipeline.workfile.workfile_template_builder import ( TemplateAlreadyImported, AbstractTemplateBuilder, @@ -73,7 +78,9 @@ class MayaTemplateBuilder(AbstractTemplateBuilder): for node in imported_sets: if not cmds.attributeQuery("id", node=node, exists=True): continue - if cmds.getAttr("{}.id".format(node)) != "pyblish.avalon.instance": + if cmds.getAttr("{}.id".format(node)) not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: continue if not cmds.attributeQuery("asset", node=node, exists=True): continue @@ -133,10 +140,12 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): placeholder_name_parts = placeholder_data["builder_type"].split("_") pos = 1 - # add family in any - placeholder_family = placeholder_data["family"] - if placeholder_family: - placeholder_name_parts.insert(pos, placeholder_family) + placeholder_product_type = placeholder_data.get("product_type") + if placeholder_product_type is None: + placeholder_product_type = placeholder_data.get("family") + + if placeholder_product_type: + placeholder_name_parts.insert(pos, placeholder_product_type) pos += 1 # add loader arguments if any diff --git a/client/ayon_core/hosts/maya/api/workio.py 
b/client/ayon_core/hosts/maya/api/workio.py index 8c31974c73..ff6c11eb4f 100644 --- a/client/ayon_core/hosts/maya/api/workio.py +++ b/client/ayon_core/hosts/maya/api/workio.py @@ -35,7 +35,7 @@ def current_file(): def work_root(session): - work_dir = session["AVALON_WORKDIR"] + work_dir = session["AYON_WORKDIR"] scene_dir = None # Query scene file rule from workspace.mel if it exists in WORKDIR diff --git a/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py b/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py index 7198f98131..03ca8661bd 100644 --- a/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py +++ b/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py @@ -12,7 +12,7 @@ class PreCopyMel(PreLaunchHook): def execute(self): project_doc = self.data["project_doc"] - workdir = self.launch_context.env.get("AVALON_WORKDIR") + workdir = self.launch_context.env.get("AYON_WORKDIR") if not workdir: self.log.warning("BUG: Workdir is not filled.") return diff --git a/client/ayon_core/hosts/maya/plugins/create/convert_legacy.py b/client/ayon_core/hosts/maya/plugins/create/convert_legacy.py index 029ea25b40..b23c56fc5b 100644 --- a/client/ayon_core/hosts/maya/plugins/create/convert_legacy.py +++ b/client/ayon_core/hosts/maya/plugins/create/convert_legacy.py @@ -10,30 +10,32 @@ from maya.app.renderSetup.model import renderSetup class MayaLegacyConvertor(SubsetConvertorPlugin, plugin.MayaCreatorBase): - """Find and convert any legacy subsets in the scene. + """Find and convert any legacy products in the scene. - This Convertor will find all legacy subsets in the scene and will - transform them to the current system. Since the old subsets doesn't + This Convertor will find all legacy products in the scene and will + transform them to the current system. Since the old products doesn't retain any information about their original creators, the only mapping we can do is based on their families. - Its limitation is that you can have multiple creators creating subset - of the same family and there is no way to handle it. This code should + Its limitation is that you can have multiple creators creating product + of the same type and there is no way to handle it. This code should nevertheless cover all creators that came with OpenPype. """ identifier = "io.openpype.creators.maya.legacy" - # Cases where the identifier or new family doesn't correspond to the + # Cases where the identifier or new product type doesn't correspond to the # original family on the legacy instances - special_family_conversions = { + product_type_mapping = { "rendering": "io.openpype.creators.maya.renderlayer", } def find_instances(self): - self.cache_subsets(self.collection_shared_data) - legacy = self.collection_shared_data.get("maya_cached_legacy_subsets") + self.cache_instance_data(self.collection_shared_data) + legacy = self.collection_shared_data.get( + "maya_cached_legacy_instances" + ) if not legacy: return @@ -45,43 +47,43 @@ class MayaLegacyConvertor(SubsetConvertorPlugin, # We can't use the collected shared data cache here # we re-query it here directly to convert all found. 
         cache = {}
-        self.cache_subsets(cache)
-        legacy = cache.get("maya_cached_legacy_subsets")
+        self.cache_instance_data(cache)
+        legacy = cache.get("maya_cached_legacy_instances")
         if not legacy:
             return

         # From all current new style manual creators find the mapping
-        # from family to identifier
-        family_to_id = {}
+        # from product type to identifier
+        product_type_to_id = {}
         for identifier, creator in self.create_context.creators.items():
-            family = getattr(creator, "family", None)
-            if not family:
+            product_type = getattr(creator, "product_type", None)
+            if not product_type:
                 continue

-            if family in family_to_id:
-                # We have a clash of family -> identifier. Multiple
-                # new style creators use the same family
-                self.log.warning("Clash on family->identifier: "
-                                 "{}".format(identifier))
-            family_to_id[family] = identifier
+            if product_type in product_type_to_id:
+                # We have a clash of product type -> identifier. Multiple
+                # new style creators use the same product type
+                self.log.warning(
+                    "Clash on product type->identifier: {}".format(identifier)
+                )
+            product_type_to_id[product_type] = identifier

-        family_to_id.update(self.special_family_conversions)
+        product_type_to_id.update(self.product_type_mapping)

         # We also embed the current 'task' into the instance since legacy
         # instances didn't store that data on the instances. The old style
         # logic was thus to be live to the current task to begin with.
         data = dict()
         data["task"] = self.create_context.get_current_task_name()

-        for family, instance_nodes in legacy.items():
-            if family not in family_to_id:
-                self.log.warning(
+        for product_type, instance_nodes in legacy.items():
+            if product_type not in product_type_to_id:
+                self.log.warning((
                     "Unable to convert legacy instance with family '{}'"
-                    " because there is no matching new creator's family"
-                    "".format(family)
-                )
+                    " because there is no matching new creator"
+                ).format(product_type))
                 continue

-            creator_id = family_to_id[family]
+            creator_id = product_type_to_id[product_type]
             creator = self.create_context.creators[creator_id]
             data["creator_identifier"] = creator_id
@@ -133,21 +135,22 @@ class MayaLegacyConvertor(SubsetConvertorPlugin,
             # Copy the attributes of the original instance to the new node
             original_data = read(instance_node)

-            # The family gets converted to the new family (this is due to
-            # "rendering" family being converted to "renderlayer" family)
-            original_data["family"] = creator.family
+            # The product type gets converted to the new product type (this
+            # is due to "rendering" being converted to "renderlayer")
+            original_data["productType"] = creator.product_type

-            # recreate subset name as without it would be
+            # recreate product name as without it would be
             # `renderingMain` vs correct `renderMain`
             project_name = self.create_context.get_current_project_name()
             asset_doc = get_asset_by_name(project_name,
                                           original_data["asset"])
-            subset_name = creator.get_subset_name(
-                original_data["variant"],
-                data["task"],
+            product_name = creator.get_product_name(
+                project_name,
                 asset_doc,
-                project_name)
-            original_data["subset"] = subset_name
+                data["task"],
+                original_data["variant"],
+            )
+            original_data["productName"] = product_name

             # Convert to creator attributes when relevant
             creator_attributes = {}
diff --git a/client/ayon_core/hosts/maya/plugins/create/create_animation.py b/client/ayon_core/hosts/maya/plugins/create/create_animation.py
index e6849b4468..f30d9aba81 100644
--- a/client/ayon_core/hosts/maya/plugins/create/create_animation.py
+++ 
b/client/ayon_core/hosts/maya/plugins/create/create_animation.py @@ -18,7 +18,7 @@ class CreateAnimation(plugin.MayaHiddenCreator): identifier = "io.openpype.creators.maya.animation" name = "animationDefault" label = "Animation" - family = "animation" + product_type = "animation" icon = "male" write_color_sets = False diff --git a/client/ayon_core/hosts/maya/plugins/create/create_arnold_scene_source.py b/client/ayon_core/hosts/maya/plugins/create/create_arnold_scene_source.py index a9455620b8..dc0ffb02c1 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_arnold_scene_source.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_arnold_scene_source.py @@ -13,7 +13,7 @@ class CreateArnoldSceneSource(plugin.MayaCreator): identifier = "io.openpype.creators.maya.ass" label = "Arnold Scene Source" - family = "ass" + product_type = "ass" icon = "cube" settings_name = "CreateAss" @@ -87,12 +87,12 @@ class CreateArnoldSceneSource(plugin.MayaCreator): return defs - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): from maya import cmds instance = super(CreateArnoldSceneSource, self).create( - subset_name, instance_data, pre_create_data + product_name, instance_data, pre_create_data ) instance_node = instance.get("instance_node") diff --git a/client/ayon_core/hosts/maya/plugins/create/create_assembly.py b/client/ayon_core/hosts/maya/plugins/create/create_assembly.py index 2b78271a49..92df125748 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_assembly.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_assembly.py @@ -6,5 +6,5 @@ class CreateAssembly(plugin.MayaCreator): identifier = "io.openpype.creators.maya.assembly" label = "Assembly" - family = "assembly" + product_type = "assembly" icon = "cubes" diff --git a/client/ayon_core/hosts/maya/plugins/create/create_camera.py b/client/ayon_core/hosts/maya/plugins/create/create_camera.py index 37d5a817a5..4b1265bd3b 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_camera.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_camera.py @@ -10,7 +10,7 @@ class CreateCamera(plugin.MayaCreator): identifier = "io.openpype.creators.maya.camera" label = "Camera" - family = "camera" + product_type = "camera" icon = "video-camera" def get_instance_attr_defs(self): @@ -32,5 +32,5 @@ class CreateCameraRig(plugin.MayaCreator): identifier = "io.openpype.creators.maya.camerarig" label = "Camera Rig" - family = "camerarig" + product_type = "camerarig" icon = "video-camera" diff --git a/client/ayon_core/hosts/maya/plugins/create/create_layout.py b/client/ayon_core/hosts/maya/plugins/create/create_layout.py index bd61fa44c6..6cbc697502 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_layout.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_layout.py @@ -7,7 +7,7 @@ class CreateLayout(plugin.MayaCreator): identifier = "io.openpype.creators.maya.layout" label = "Layout" - family = "layout" + product_type = "layout" icon = "cubes" def get_instance_attr_defs(self): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_look.py b/client/ayon_core/hosts/maya/plugins/create/create_look.py index 4655ec1377..ac3625c38f 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_look.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_look.py @@ -13,7 +13,7 @@ class CreateLook(plugin.MayaCreator): identifier = "io.openpype.creators.maya.look" label = "Look" - family = "look" + product_type = "look" icon = 
"paint-brush" make_tx = True diff --git a/client/ayon_core/hosts/maya/plugins/create/create_matchmove.py b/client/ayon_core/hosts/maya/plugins/create/create_matchmove.py index 00de553404..44443a8b9f 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_matchmove.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_matchmove.py @@ -15,7 +15,7 @@ class CreateMatchmove(plugin.MayaCreator): identifier = "io.openpype.creators.maya.matchmove" label = "Matchmove" - family = "matchmove" + product_type = "matchmove" icon = "video-camera" def get_instance_attr_defs(self): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_maya_usd.py b/client/ayon_core/hosts/maya/plugins/create/create_maya_usd.py index f6c8a55e68..3f34a541b4 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_maya_usd.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_maya_usd.py @@ -13,7 +13,7 @@ class CreateMayaUsd(plugin.MayaCreator): identifier = "io.openpype.creators.maya.mayausd" label = "Maya USD" - family = "usd" + product_type = "usd" icon = "cubes" description = "Create Maya USD Export" diff --git a/client/ayon_core/hosts/maya/plugins/create/create_mayascene.py b/client/ayon_core/hosts/maya/plugins/create/create_mayascene.py index c4024d3710..cfe46336a2 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_mayascene.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_mayascene.py @@ -7,5 +7,5 @@ class CreateMayaScene(plugin.MayaCreator): identifier = "io.openpype.creators.maya.mayascene" name = "mayaScene" label = "Maya Scene" - family = "mayaScene" + product_type = "mayaScene" icon = "file-archive-o" diff --git a/client/ayon_core/hosts/maya/plugins/create/create_model.py b/client/ayon_core/hosts/maya/plugins/create/create_model.py index 67e6b87190..b47df421f3 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_model.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_model.py @@ -10,7 +10,7 @@ class CreateModel(plugin.MayaCreator): identifier = "io.openpype.creators.maya.model" label = "Model" - family = "model" + product_type = "model" icon = "cube" default_variants = ["Main", "Proxy", "_MD", "_HD", "_LD"] diff --git a/client/ayon_core/hosts/maya/plugins/create/create_multishot_layout.py b/client/ayon_core/hosts/maya/plugins/create/create_multishot_layout.py index d05c5ae9a1..e7b903312f 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_multishot_layout.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_multishot_layout.py @@ -27,7 +27,7 @@ class CreateMultishotLayout(plugin.MayaCreator): """ identifier = "io.openpype.creators.maya.multishotlayout" label = "Multi-shot Layout" - family = "layout" + product_type = "layout" icon = "project-diagram" def get_pre_create_attr_defs(self): @@ -106,7 +106,7 @@ class CreateMultishotLayout(plugin.MayaCreator): default="layout"), ] - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): shots = list( self.get_related_shots(folder_path=pre_create_data["shotParent"]) ) @@ -164,11 +164,12 @@ class CreateMultishotLayout(plugin.MayaCreator): instance_data["task"] = layout_task layout_creator.create( - subset_name=layout_creator.get_subset_name( - layout_creator.get_default_variant(), - self.create_context.get_current_task_name(), + product_name=layout_creator.get_product_name( + self.project_name, asset_doc, - self.project_name), + self.create_context.get_current_task_name(), + layout_creator.get_default_variant(), + 
), instance_data=instance_data, pre_create_data={ "groupLoadedAssets": pre_create_data["groupLoadedAssets"] diff --git a/client/ayon_core/hosts/maya/plugins/create/create_multiverse_look.py b/client/ayon_core/hosts/maya/plugins/create/create_multiverse_look.py index 11e13b2748..de604a33b3 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_multiverse_look.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_multiverse_look.py @@ -10,7 +10,7 @@ class CreateMultiverseLook(plugin.MayaCreator): identifier = "io.openpype.creators.maya.mvlook" label = "Multiverse Look" - family = "mvLook" + product_type = "mvLook" icon = "cubes" def get_instance_attr_defs(self): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_multiverse_usd.py b/client/ayon_core/hosts/maya/plugins/create/create_multiverse_usd.py index af0ffa9f23..668700995f 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_multiverse_usd.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_multiverse_usd.py @@ -12,7 +12,7 @@ class CreateMultiverseUsd(plugin.MayaCreator): identifier = "io.openpype.creators.maya.mvusdasset" label = "Multiverse USD Asset" - family = "usd" + product_type = "usd" icon = "cubes" description = "Create Multiverse USD Asset" diff --git a/client/ayon_core/hosts/maya/plugins/create/create_multiverse_usd_comp.py b/client/ayon_core/hosts/maya/plugins/create/create_multiverse_usd_comp.py index 202fbbcbc8..120e6ad920 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_multiverse_usd_comp.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_multiverse_usd_comp.py @@ -11,7 +11,7 @@ class CreateMultiverseUsdComp(plugin.MayaCreator): identifier = "io.openpype.creators.maya.mvusdcomposition" label = "Multiverse USD Composition" - family = "mvUsdComposition" + product_type = "mvUsdComposition" icon = "cubes" def get_instance_attr_defs(self): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_multiverse_usd_over.py b/client/ayon_core/hosts/maya/plugins/create/create_multiverse_usd_over.py index cca2b54392..26208794e3 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_multiverse_usd_over.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_multiverse_usd_over.py @@ -11,7 +11,7 @@ class CreateMultiverseUsdOver(plugin.MayaCreator): identifier = "io.openpype.creators.maya.mvusdoverride" label = "Multiverse USD Override" - family = "mvUsdOverride" + product_type = "mvUsdOverride" icon = "cubes" def get_instance_attr_defs(self): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_pointcache.py b/client/ayon_core/hosts/maya/plugins/create/create_pointcache.py index 832e0bfbc5..05e3a1a29f 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_pointcache.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_pointcache.py @@ -15,7 +15,7 @@ class CreatePointCache(plugin.MayaCreator): identifier = "io.openpype.creators.maya.pointcache" label = "Pointcache" - family = "pointcache" + product_type = "pointcache" icon = "gears" write_color_sets = False write_face_sets = False @@ -76,10 +76,10 @@ class CreatePointCache(plugin.MayaCreator): return defs - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): instance = super(CreatePointCache, self).create( - subset_name, instance_data, pre_create_data + product_name, instance_data, pre_create_data ) instance_node = instance.get("instance_node") diff --git 
a/client/ayon_core/hosts/maya/plugins/create/create_proxy_abc.py b/client/ayon_core/hosts/maya/plugins/create/create_proxy_abc.py index 8b8cedd7ab..ecc031436c 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_proxy_abc.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_proxy_abc.py @@ -13,7 +13,7 @@ class CreateProxyAlembic(plugin.MayaCreator): identifier = "io.openpype.creators.maya.proxyabc" label = "Proxy Alembic" - family = "proxyAbc" + product_type = "proxyAbc" icon = "gears" write_color_sets = False write_face_sets = False diff --git a/client/ayon_core/hosts/maya/plugins/create/create_redshift_proxy.py b/client/ayon_core/hosts/maya/plugins/create/create_redshift_proxy.py index 72c86a0b74..d99fe5a787 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_redshift_proxy.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_redshift_proxy.py @@ -1,16 +1,16 @@ # -*- coding: utf-8 -*- -"""Creator of Redshift proxy subset types.""" +"""Creator of Redshift proxy product types.""" from ayon_core.hosts.maya.api import plugin, lib from ayon_core.lib import BoolDef class CreateRedshiftProxy(plugin.MayaCreator): - """Create instance of Redshift Proxy subset.""" + """Create instance of Redshift Proxy product.""" identifier = "io.openpype.creators.maya.redshiftproxy" label = "Redshift Proxy" - family = "redshiftproxy" + product_type = "redshiftproxy" icon = "gears" def get_instance_attr_defs(self): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_render.py b/client/ayon_core/hosts/maya/plugins/create/create_render.py index f537f249cd..213d5b543e 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_render.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_render.py @@ -13,7 +13,7 @@ from ayon_core.lib import ( class CreateRenderlayer(plugin.RenderlayerCreator): - """Create and manages renderlayer subset per renderLayer in workfile. + """Create and manages renderlayer product per renderLayer in workfile. This generates a single node in the scene which tells the Creator to if it exists collect Maya rendersetup renderlayers as individual instances. 
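Aside on the `CreateRenderlayer` docstring above: the creator keys its behaviour off a single marker objectSet in the scene (looked up via `_get_singleton_node()` in the hunks that follow), so `create()` only guards against duplicates while collection yields one instance per render setup layer. A rough, non-authoritative sketch of that guard pattern, assuming Maya's `cmds` is available; the marker set name here is purely hypothetical:

```python
from maya import cmds

# Hypothetical marker set name; the real creator defines its own node name.
MARKER_SET = "renderingMain"


def get_or_create_render_marker():
    """Return the singleton marker set, creating it only when it is missing."""
    existing = cmds.ls(MARKER_SET, type="objectSet")
    if existing:
        # Marker already exists: a render instance is present, so a second
        # create() call should refuse rather than add another one.
        return existing[0]
    return cmds.sets(empty=True, name=MARKER_SET)
```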
@@ -24,7 +24,7 @@ class CreateRenderlayer(plugin.RenderlayerCreator): """ identifier = "io.openpype.creators.maya.renderlayer" - family = "renderlayer" + product_type = "renderlayer" label = "Render" icon = "eye" @@ -35,9 +35,9 @@ class CreateRenderlayer(plugin.RenderlayerCreator): @classmethod def apply_settings(cls, project_settings): - cls.render_settings = project_settings["maya"]["RenderSettings"] + cls.render_settings = project_settings["maya"]["render_settings"] - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): # Only allow a single render instance to exist if self._get_singleton_node(): raise CreatorError("A Render instance already exists - only " @@ -47,7 +47,7 @@ class CreateRenderlayer(plugin.RenderlayerCreator): if self.render_settings.get("apply_render_settings"): lib_rendersettings.RenderSettings().set_default_renderer_settings() - super(CreateRenderlayer, self).create(subset_name, + super(CreateRenderlayer, self).create(product_name, instance_data, pre_create_data) diff --git a/client/ayon_core/hosts/maya/plugins/create/create_rendersetup.py b/client/ayon_core/hosts/maya/plugins/create/create_rendersetup.py index dc47325a34..3d8d6a7309 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_rendersetup.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_rendersetup.py @@ -7,18 +7,18 @@ class CreateRenderSetup(plugin.MayaCreator): identifier = "io.openpype.creators.maya.rendersetup" label = "Render Setup Preset" - family = "rendersetup" + product_type = "rendersetup" icon = "tablet" def get_pre_create_attr_defs(self): # Do not show the "use_selection" setting from parent class return [] - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): existing_instance = None for instance in self.create_context.instances: - if instance.family == self.family: + if instance.product_type == self.product_type: existing_instance = instance break @@ -26,6 +26,6 @@ class CreateRenderSetup(plugin.MayaCreator): raise CreatorError("A RenderSetup instance already exists - only " "one can be configured.") - super(CreateRenderSetup, self).create(subset_name, + super(CreateRenderSetup, self).create(product_name, instance_data, pre_create_data) diff --git a/client/ayon_core/hosts/maya/plugins/create/create_review.py b/client/ayon_core/hosts/maya/plugins/create/create_review.py index 6f7c0ca802..c4fa045427 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_review.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_review.py @@ -29,14 +29,14 @@ class CreateReview(plugin.MayaCreator): identifier = "io.openpype.creators.maya.review" label = "Review" - family = "review" + product_type = "review" icon = "video-camera" useMayaTimeline = True panZoom = False # Overriding "create" method to prefill values from settings. 
- def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): members = list() if pre_create_data.get("use_selection"): @@ -49,7 +49,7 @@ class CreateReview(plugin.MayaCreator): preset = lib.get_capture_preset( task_name, asset_doc["data"]["tasks"][task_name]["type"], - subset_name, + product_name, self.project_settings, self.log ) @@ -60,11 +60,11 @@ class CreateReview(plugin.MayaCreator): ) with lib.undo_chunk(): - instance_node = cmds.sets(members, name=subset_name) + instance_node = cmds.sets(members, name=product_name) instance_data["instance_node"] = instance_node instance = CreatedInstance( - self.family, - subset_name, + self.product_type, + product_name, instance_data, self) @@ -75,7 +75,7 @@ class CreateReview(plugin.MayaCreator): "review_width": preset["Resolution"]["width"], "review_height": preset["Resolution"]["height"], "isolate": preset["Generic"]["isolate_view"], - "imagePlane": preset["Viewport Options"]["imagePlane"], + "imagePlane": preset["ViewportOptions"]["imagePlane"], "panZoom": preset["Generic"]["pan_zoom"] } for key, value in mapping.items(): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_rig.py b/client/ayon_core/hosts/maya/plugins/create/create_rig.py index e49e3040ba..54be50c169 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_rig.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_rig.py @@ -8,12 +8,12 @@ class CreateRig(plugin.MayaCreator): identifier = "io.openpype.creators.maya.rig" label = "Rig" - family = "rig" + product_type = "rig" icon = "wheelchair" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): - instance = super(CreateRig, self).create(subset_name, + instance = super(CreateRig, self).create(product_name, instance_data, pre_create_data) @@ -21,12 +21,12 @@ class CreateRig(plugin.MayaCreator): self.log.info("Creating Rig instance set up ...") # TODO:change name (_controls_SET -> _rigs_SET) - controls = cmds.sets(name=subset_name + "_controls_SET", empty=True) + controls = cmds.sets(name=product_name + "_controls_SET", empty=True) # TODO:change name (_out_SET -> _geo_SET) - pointcache = cmds.sets(name=subset_name + "_out_SET", empty=True) + pointcache = cmds.sets(name=product_name + "_out_SET", empty=True) skeleton = cmds.sets( - name=subset_name + "_skeletonAnim_SET", empty=True) + name=product_name + "_skeletonAnim_SET", empty=True) skeleton_mesh = cmds.sets( - name=subset_name + "_skeletonMesh_SET", empty=True) + name=product_name + "_skeletonMesh_SET", empty=True) cmds.sets([controls, pointcache, skeleton, skeleton_mesh], forceElement=instance_node) diff --git a/client/ayon_core/hosts/maya/plugins/create/create_setdress.py b/client/ayon_core/hosts/maya/plugins/create/create_setdress.py index dfc38f5d76..0f72d4d184 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_setdress.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_setdress.py @@ -7,7 +7,7 @@ class CreateSetDress(plugin.MayaCreator): identifier = "io.openpype.creators.maya.setdress" label = "Set Dress" - family = "setdress" + product_type = "setdress" icon = "cubes" default_variants = ["Main", "Anim"] diff --git a/client/ayon_core/hosts/maya/plugins/create/create_unreal_skeletalmesh.py b/client/ayon_core/hosts/maya/plugins/create/create_unreal_skeletalmesh.py index 550a9cdb0f..8815c4d23d 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_unreal_skeletalmesh.py +++ 
b/client/ayon_core/hosts/maya/plugins/create/create_unreal_skeletalmesh.py @@ -14,9 +14,8 @@ class CreateUnrealSkeletalMesh(plugin.MayaCreator): identifier = "io.openpype.creators.maya.unrealskeletalmesh" label = "Unreal - Skeletal Mesh" - family = "skeletalMesh" + product_type = "skeletalMesh" icon = "thumbs-up" - dynamic_subset_keys = ["asset"] # Defined in settings joint_hints = set() @@ -29,23 +28,23 @@ class CreateUnrealSkeletalMesh(plugin.MayaCreator): self.joint_hints = set(settings.get("joint_hints", [])) def get_dynamic_data( - self, variant, task_name, asset_doc, project_name, host_name, instance + self, project_name, asset_doc, task_name, variant, host_name, instance ): """ - The default subset name templates for Unreal include {asset} and thus + The default product name templates for Unreal include {asset} and thus we should pass that along as dynamic data. """ dynamic_data = super(CreateUnrealSkeletalMesh, self).get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name, instance + project_name, asset_doc, task_name, variant, host_name, instance ) dynamic_data["asset"] = asset_doc["name"] return dynamic_data - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): with lib.undo_chunk(): instance = super(CreateUnrealSkeletalMesh, self).create( - subset_name, instance_data, pre_create_data) + product_name, instance_data, pre_create_data) instance_node = instance.get("instance_node") # We reorganize the geometry that was originally added into the diff --git a/client/ayon_core/hosts/maya/plugins/create/create_unreal_staticmesh.py b/client/ayon_core/hosts/maya/plugins/create/create_unreal_staticmesh.py index d1fac03bdf..58ad1e4133 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_unreal_staticmesh.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_unreal_staticmesh.py @@ -9,9 +9,8 @@ class CreateUnrealStaticMesh(plugin.MayaCreator): identifier = "io.openpype.creators.maya.unrealstaticmesh" label = "Unreal - Static Mesh" - family = "staticMesh" + product_type = "staticMesh" icon = "cube" - dynamic_subset_keys = ["asset"] # Defined in settings collision_prefixes = [] @@ -22,23 +21,23 @@ class CreateUnrealStaticMesh(plugin.MayaCreator): self.collision_prefixes = settings["collision_prefixes"] def get_dynamic_data( - self, variant, task_name, asset_doc, project_name, host_name, instance + self, project_name, asset_doc, task_name, variant, host_name, instance ): """ - The default subset name templates for Unreal include {asset} and thus + The default product name templates for Unreal include {asset} and thus we should pass that along as dynamic data. 
""" dynamic_data = super(CreateUnrealStaticMesh, self).get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name, instance + project_name, asset_doc, task_name, variant, host_name, instance ) dynamic_data["asset"] = asset_doc["name"] return dynamic_data - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): with lib.undo_chunk(): instance = super(CreateUnrealStaticMesh, self).create( - subset_name, instance_data, pre_create_data) + product_name, instance_data, pre_create_data) instance_node = instance.get("instance_node") # We reorganize the geometry that was originally added into the diff --git a/client/ayon_core/hosts/maya/plugins/create/create_unreal_yeticache.py b/client/ayon_core/hosts/maya/plugins/create/create_unreal_yeticache.py index 4cd7288cfc..1eac8a5ea9 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_unreal_yeticache.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_unreal_yeticache.py @@ -10,7 +10,7 @@ class CreateYetiCache(plugin.MayaCreator): identifier = "io.openpype.creators.maya.unrealyeticache" label = "Unreal - Yeti Cache" - family = "yeticacheUE" + product_type = "yeticacheUE" icon = "pagelines" def get_instance_attr_defs(self): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_vrayproxy.py b/client/ayon_core/hosts/maya/plugins/create/create_vrayproxy.py index 7d16c5bc2c..d565ec37e0 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_vrayproxy.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_vrayproxy.py @@ -10,7 +10,7 @@ class CreateVrayProxy(plugin.MayaCreator): identifier = "io.openpype.creators.maya.vrayproxy" label = "VRay Proxy" - family = "vrayproxy" + product_type = "vrayproxy" icon = "gears" vrmesh = True diff --git a/client/ayon_core/hosts/maya/plugins/create/create_vrayscene.py b/client/ayon_core/hosts/maya/plugins/create/create_vrayscene.py index 3642f5f689..cf5e7b5364 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_vrayscene.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_vrayscene.py @@ -14,7 +14,7 @@ class CreateVRayScene(plugin.RenderlayerCreator): identifier = "io.openpype.creators.maya.vrayscene" - family = "vrayscene" + product_type = "vrayscene" label = "VRay Scene" icon = "cubes" @@ -23,15 +23,15 @@ class CreateVRayScene(plugin.RenderlayerCreator): @classmethod def apply_settings(cls, project_settings): - cls.render_settings = project_settings["maya"]["RenderSettings"] + cls.render_settings = project_settings["maya"]["render_settings"] - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): # Only allow a single render instance to exist if self._get_singleton_node(): raise CreatorError("A Render instance already exists - only " "one can be configured.") - super(CreateVRayScene, self).create(subset_name, + super(CreateVRayScene, self).create(product_name, instance_data, pre_create_data) diff --git a/client/ayon_core/hosts/maya/plugins/create/create_workfile.py b/client/ayon_core/hosts/maya/plugins/create/create_workfile.py index 396ad6ffbb..5eb32e1c90 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_workfile.py @@ -10,7 +10,7 @@ class CreateWorkfile(plugin.MayaCreatorBase, AutoCreator): """Workfile auto-creator.""" identifier = "io.openpype.creators.maya.workfile" label = "Workfile" - family = "workfile" + product_type = 
"workfile" icon = "fa5.file" default_variant = "Main" @@ -36,8 +36,12 @@ class CreateWorkfile(plugin.MayaCreatorBase, AutoCreator): if current_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + variant, + host_name, ) data = { "folderPath": asset_name, @@ -46,12 +50,16 @@ class CreateWorkfile(plugin.MayaCreatorBase, AutoCreator): } data.update( self.get_dynamic_data( - variant, task_name, asset_doc, - project_name, host_name, current_instance) + project_name, + asset_doc, + task_name, + variant, + host_name, + current_instance) ) self.log.info("Auto-creating workfile instance...") current_instance = CreatedInstance( - self.family, subset_name, data, self + self.product_type, product_name, data, self ) self._add_instance_to_context(current_instance) elif ( @@ -60,19 +68,25 @@ class CreateWorkfile(plugin.MayaCreatorBase, AutoCreator): ): # Update instance context if is not the same asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + variant, + host_name, ) asset_name = get_asset_name_identifier(asset_doc) current_instance["folderPath"] = asset_name current_instance["task"] = task_name - current_instance["subset"] = subset_name + current_instance["productName"] = product_name def collect_instances(self): - self.cache_subsets(self.collection_shared_data) - cached_subsets = self.collection_shared_data["maya_cached_subsets"] - for node in cached_subsets.get(self.identifier, []): + self.cache_instance_data(self.collection_shared_data) + cached_instances = ( + self.collection_shared_data["maya_cached_instance_data"] + ) + for node in cached_instances.get(self.identifier, []): node_data = self.read_instance_node(node) created_instance = CreatedInstance.from_existing(node_data, self) diff --git a/client/ayon_core/hosts/maya/plugins/create/create_xgen.py b/client/ayon_core/hosts/maya/plugins/create/create_xgen.py index 4e0d41b689..fec2f07456 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_xgen.py @@ -6,5 +6,5 @@ class CreateXgen(plugin.MayaCreator): identifier = "io.openpype.creators.maya.xgen" label = "Xgen" - family = "xgen" + product_type = "xgen" icon = "pagelines" diff --git a/client/ayon_core/hosts/maya/plugins/create/create_yeti_cache.py b/client/ayon_core/hosts/maya/plugins/create/create_yeti_cache.py index 82b18f113a..bf20acaca8 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_yeti_cache.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_yeti_cache.py @@ -10,7 +10,7 @@ class CreateYetiCache(plugin.MayaCreator): identifier = "io.openpype.creators.maya.yeticache" label = "Yeti Cache" - family = "yeticache" + product_type = "yeticache" icon = "pagelines" def get_instance_attr_defs(self): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_yeti_rig.py b/client/ayon_core/hosts/maya/plugins/create/create_yeti_rig.py index df3c89a64d..dfe224ceb1 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_yeti_rig.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_yeti_rig.py @@ -11,13 +11,13 @@ class CreateYetiRig(plugin.MayaCreator): identifier = "io.openpype.creators.maya.yetirig" label = "Yeti 
Rig" - family = "yetiRig" + product_type = "yetiRig" icon = "usb" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): with lib.undo_chunk(): - instance = super(CreateYetiRig, self).create(subset_name, + instance = super(CreateYetiRig, self).create(product_name, instance_data, pre_create_data) instance_node = instance.get("instance_node") diff --git a/client/ayon_core/hosts/maya/plugins/inventory/connect_geometry.py b/client/ayon_core/hosts/maya/plugins/inventory/connect_geometry.py index 13c9de4693..054c84bea2 100644 --- a/client/ayon_core/hosts/maya/plugins/inventory/connect_geometry.py +++ b/client/ayon_core/hosts/maya/plugins/inventory/connect_geometry.py @@ -9,7 +9,7 @@ class ConnectGeometry(InventoryAction): Source container will connect to the target containers, by searching for matching geometry IDs (cbid). - Source containers are of family; "animation" and "pointcache". + Source containers are of product type: "animation" and "pointcache". The connection with be done with a live world space blendshape. """ @@ -27,19 +27,19 @@ class ConnectGeometry(InventoryAction): return # Categorize containers by family. - containers_by_family = {} + containers_by_product_type = {} for container in containers: - family = get_representation_context( + product_type = get_representation_context( container["representation"] )["subset"]["data"]["family"] try: - containers_by_family[family].append(container) + containers_by_product_type[product_type].append(container) except KeyError: - containers_by_family[family] = [container] + containers_by_product_type[product_type] = [container] # Validate to only 1 source container. - source_containers = containers_by_family.get("animation", []) - source_containers += containers_by_family.get("pointcache", []) + source_containers = containers_by_product_type.get("animation", []) + source_containers += containers_by_product_type.get("pointcache", []) source_container_namespaces = [ x["namespace"] for x in source_containers ] @@ -57,8 +57,8 @@ class ConnectGeometry(InventoryAction): # Collect matching geometry transforms based cbId attribute. target_containers = [] - for family, containers in containers_by_family.items(): - if family in ["animation", "pointcache"]: + for product_type, containers in containers_by_product_type.items(): + if product_type in ["animation", "pointcache"]: continue target_containers.extend(containers) diff --git a/client/ayon_core/hosts/maya/plugins/inventory/connect_xgen.py b/client/ayon_core/hosts/maya/plugins/inventory/connect_xgen.py index 2a198addf2..fa6440fc37 100644 --- a/client/ayon_core/hosts/maya/plugins/inventory/connect_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/inventory/connect_xgen.py @@ -23,20 +23,20 @@ class ConnectXgen(InventoryAction): self.display_warning(message) return - # Categorize containers by family. - containers_by_family = {} + # Categorize containers by product type. + containers_by_product_type = {} for container in containers: - family = get_representation_context( + product_type = get_representation_context( container["representation"] )["subset"]["data"]["family"] try: - containers_by_family[family].append(container) + containers_by_product_type[product_type].append(container) except KeyError: - containers_by_family[family] = [container] + containers_by_product_type[product_type] = [container] # Validate to only 1 source container. 
- source_containers = containers_by_family.get("animation", []) - source_containers += containers_by_family.get("pointcache", []) + source_containers = containers_by_product_type.get("animation", []) + source_containers += containers_by_product_type.get("pointcache", []) source_container_namespaces = [ x["namespace"] for x in source_containers ] @@ -68,8 +68,8 @@ class ConnectXgen(InventoryAction): # Target containers. target_containers = [] - for family, containers in containers_by_family.items(): - if family in ["animation", "pointcache"]: + for product_type, containers in containers_by_product_type.items(): + if product_type in ["animation", "pointcache"]: continue target_containers.extend(containers) diff --git a/client/ayon_core/hosts/maya/plugins/inventory/connect_yeti_rig.py b/client/ayon_core/hosts/maya/plugins/inventory/connect_yeti_rig.py index 19498e5c1c..66807e9d5d 100644 --- a/client/ayon_core/hosts/maya/plugins/inventory/connect_yeti_rig.py +++ b/client/ayon_core/hosts/maya/plugins/inventory/connect_yeti_rig.py @@ -26,17 +26,17 @@ class ConnectYetiRig(InventoryAction): self.display_warning(message) return - # Categorize containers by family. - containers_by_family = defaultdict(list) + # Categorize containers by product type. + containers_by_product_type = defaultdict(list) for container in containers: - family = get_representation_context( + product_type = get_representation_context( container["representation"] )["subset"]["data"]["family"] - containers_by_family[family].append(container) + containers_by_product_type[product_type].append(container) # Validate to only 1 source container. - source_containers = containers_by_family.get("animation", []) - source_containers += containers_by_family.get("pointcache", []) + source_containers = containers_by_product_type.get("animation", []) + source_containers += containers_by_product_type.get("pointcache", []) source_container_namespaces = [ x["namespace"] for x in source_containers ] @@ -57,7 +57,7 @@ class ConnectYetiRig(InventoryAction): target_ids = {} inputs = [] - yeti_rig_containers = containers_by_family.get("yetiRig") + yeti_rig_containers = containers_by_product_type.get("yetiRig") if not yeti_rig_containers: self.display_warning( "Select at least one yetiRig container" diff --git a/client/ayon_core/hosts/maya/plugins/load/_load_animation.py b/client/ayon_core/hosts/maya/plugins/load/_load_animation.py index bf7f3859e1..e6dc1e520a 100644 --- a/client/ayon_core/hosts/maya/plugins/load/_load_animation.py +++ b/client/ayon_core/hosts/maya/plugins/load/_load_animation.py @@ -7,7 +7,7 @@ def _process_reference(file_url, name, namespace, options): Args: - file_url (str): fileapth of the objects to be loaded - name (str): subset name + file_url (str): filepath of the objects to be loaded + name (str): product name namespace (str): namespace options (dict): dict of storing the param @@ -16,7 +16,7 @@ def _process_reference(file_url, name, namespace, options): """ from ayon_core.hosts.maya.api.lib import unique_namespace # Get name from asset being loaded - # Assuming name is subset name from the animation, we split the number + # Assuming name is product name from the animation, we split the number # suffix from the name to ensure the namespace is unique name = name.split("_")[0] ext = file_url.split(".")[-1] diff --git a/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py b/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py index c690d1c205..8fd3ad4979 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py +++
b/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py @@ -6,7 +6,6 @@ import maya.cmds as cmds from ayon_core.settings import get_project_settings from ayon_core.pipeline import ( load, - legacy_io, get_representation_path ) from ayon_core.hosts.maya.api.lib import ( @@ -16,6 +15,7 @@ from ayon_core.hosts.maya.api.lib import ( convert_to_maya_fps ) from ayon_core.hosts.maya.api.pipeline import containerise +from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type def is_sequence(files): @@ -26,11 +26,6 @@ def is_sequence(files): return sequence -def get_current_session_fps(): - session_fps = float(legacy_io.Session.get('AVALON_FPS', 25)) - return convert_to_maya_fps(session_fps) - - class ArnoldStandinLoader(load.LoaderPlugin): """Load as Arnold standin""" @@ -72,11 +67,12 @@ class ArnoldStandinLoader(load.LoaderPlugin): # Set color. settings = get_project_settings(context["project"]["name"]) - color = settings['maya']['load']['colors'].get('ass') + color = get_load_color_for_product_type("ass", settings) if color is not None: + red, green, blue = color cmds.setAttr(root + ".useOutlinerColor", True) cmds.setAttr( - root + ".outlinerColor", color[0], color[1], color[2] + root + ".outlinerColor", red, green, blue ) with maintained_selection(): @@ -99,7 +95,7 @@ class ArnoldStandinLoader(load.LoaderPlugin): sequence = is_sequence(os.listdir(os.path.dirname(repre_path))) cmds.setAttr(standin_shape + ".useFrameExtension", sequence) - fps = float(version["data"].get("fps"))or get_current_session_fps() + fps = float(version["data"].get("fps")) or 25 cmds.setAttr(standin_shape + ".abcFPS", fps) nodes = [root, standin, standin_shape] @@ -181,7 +177,7 @@ class ArnoldStandinLoader(load.LoaderPlugin): return proxy_path, string_replace_operator - def update(self, container, representation): + def update(self, container, context): # Update the standin members = cmds.sets(container['objectName'], query=True) for member in members: @@ -194,7 +190,8 @@ class ArnoldStandinLoader(load.LoaderPlugin): if cmds.nodeType(shapes[0]) == "aiStandIn": standin = shapes[0] - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) proxy_basename, proxy_path = self._get_proxy_path(path) # Whether there is proxy or so, we still update the string operator. 
@@ -220,12 +217,12 @@ class ArnoldStandinLoader(load.LoaderPlugin): cmds.setAttr( container["objectName"] + ".representation", - str(representation["_id"]), + str(repre_doc["_id"]), type="string" ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): members = cmds.sets(container['objectName'], query=True) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_assembly.py b/client/ayon_core/hosts/maya/plugins/load/load_assembly.py index e119dfe1c3..1f06655dad 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_assembly.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_assembly.py @@ -49,9 +49,9 @@ class AssemblyLoader(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): - return setdress.update_package(container, representation) + return setdress.update_package(container, context) def remove(self, container): """Remove all sub containers""" diff --git a/client/ayon_core/hosts/maya/plugins/load/load_audio.py b/client/ayon_core/hosts/maya/plugins/load/load_audio.py index deeeac66f2..df811a585c 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_audio.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_audio.py @@ -45,7 +45,8 @@ class AudioLoader(load.LoaderPlugin): loader=self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): + repre_doc = context["representation"] members = get_container_members(container) audio_nodes = cmds.ls(members, type="audio") @@ -60,7 +61,7 @@ class AudioLoader(load.LoaderPlugin): ) activate_sound = current_sound == audio_node - path = get_representation_path(representation) + path = get_representation_path(repre_doc) cmds.sound( audio_node, @@ -93,12 +94,12 @@ class AudioLoader(load.LoaderPlugin): cmds.setAttr( container["objectName"] + ".representation", - str(representation["_id"]), + str(repre_doc["_id"]), type="string" ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): members = cmds.sets(container['objectName'], query=True) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py b/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py index 00a76d374b..9453c9c9c6 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py @@ -9,6 +9,7 @@ from ayon_core.pipeline import ( get_representation_path ) from ayon_core.settings import get_project_settings +from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type class GpuCacheLoader(load.LoaderPlugin): @@ -39,13 +40,12 @@ class GpuCacheLoader(load.LoaderPlugin): project_name = context["project"]["name"] settings = get_project_settings(project_name) - colors = settings['maya']['load']['colors'] - c = colors.get('model') - if c is not None: + color = get_load_color_for_product_type("model", settings) + if color is not None: + red, green, blue = color cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr( - root + ".outlinerColor", - (float(c[0]) / 255), (float(c[1]) / 255), (float(c[2]) / 255) + root + ".outlinerColor", red, green, blue ) # Create transform with shape @@ -74,8 +74,9 @@ class GpuCacheLoader(load.LoaderPlugin): context=context, 
loader=self.__class__.__name__) - def update(self, container, representation): - path = get_representation_path(representation) + def update(self, container, context): + repre_doc = context["representation"] + path = get_representation_path(repre_doc) # Update the cache members = cmds.sets(container['objectName'], query=True) @@ -87,11 +88,11 @@ class GpuCacheLoader(load.LoaderPlugin): cmds.setAttr(cache + ".cacheFileName", path, type="string") cmds.setAttr(container["objectName"] + ".representation", - str(representation["_id"]), + str(repre_doc["_id"]), type="string") - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): members = cmds.sets(container['objectName'], query=True) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_image.py b/client/ayon_core/hosts/maya/plugins/load/load_image.py index aedeb63e3d..7b324986f0 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_image.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_image.py @@ -146,23 +146,23 @@ class FileNodeLoader(load.LoaderPlugin): loader=self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): + repre_doc = context["representation"] members = cmds.sets(container['objectName'], query=True) file_node = cmds.ls(members, type="file")[0] - context = get_representation_context(representation) self._apply_representation_context(context, file_node) # Update representation cmds.setAttr( container["objectName"] + ".representation", - str(representation["_id"]), + str(repre_doc["_id"]), type="string" ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): members = cmds.sets(container['objectName'], query=True) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py b/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py index fb27e6597a..2366f6edd7 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py @@ -205,34 +205,26 @@ class ImagePlaneLoader(load.LoaderPlugin): loader=self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): + asset_doc = context["asset"] + repre_doc = context["representation"] members = get_container_members(container) image_planes = cmds.ls(members, type="imagePlane") assert image_planes, "Image plane not found." image_plane_shape = image_planes[0] - path = get_representation_path(representation) + path = get_representation_path(repre_doc) cmds.setAttr("{}.imageName".format(image_plane_shape), path, type="string") cmds.setAttr("{}.representation".format(container["objectName"]), - str(representation["_id"]), + str(repre_doc["_id"]), type="string") # Set frame range. 
- project_name = get_current_project_name() - version = get_version_by_id( - project_name, representation["parent"], fields=["parent"] - ) - subset = get_subset_by_id( - project_name, version["parent"], fields=["parent"] - ) - asset = get_asset_by_id( - project_name, subset["parent"], fields=["parent"] - ) - start_frame = asset["data"]["frameStart"] - end_frame = asset["data"]["frameEnd"] + start_frame = asset_doc["data"]["frameStart"] + end_frame = asset_doc["data"]["frameEnd"] for attr, value in { "frameOffset": 0, @@ -243,8 +235,8 @@ class ImagePlaneLoader(load.LoaderPlugin): plug = "{}.{}".format(image_plane_shape, attr) cmds.setAttr(plug, value) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): members = cmds.sets(container['objectName'], query=True) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_look.py b/client/ayon_core/hosts/maya/plugins/load/load_look.py index ba5891469d..fb5be14aa1 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_look.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_look.py @@ -43,10 +43,10 @@ class LookLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): self[:] = nodes - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """ Called by Scene Inventory when look should be updated to current version. @@ -56,7 +56,7 @@ class LookLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): Args: container: object that has look to be updated - representation: (dict): relationship data to get proper + context: (dict): relationship data to get proper representation from DB and persisted data in .json Returns: @@ -72,15 +72,16 @@ class LookLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): orig_nodes = set(self._get_nodes_with_shader(shader_nodes)) # Trigger the regular reference update on the ReferenceLoader - super(LookLoader, self).update(container, representation) + super(LookLoader, self).update(container, context) # get new applied shaders and nodes from new version shader_nodes = cmds.ls(members, type='shadingEngine') nodes = set(self._get_nodes_with_shader(shader_nodes)) + version_doc = context["version"] project_name = get_current_project_name() json_representation = get_representation_by_name( - project_name, "json", representation["parent"] + project_name, "json", version_doc["_id"] ) # Load relationships diff --git a/client/ayon_core/hosts/maya/plugins/load/load_maya_usd.py b/client/ayon_core/hosts/maya/plugins/load/load_maya_usd.py index c2bea1501c..cb9fde7b33 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_maya_usd.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_maya_usd.py @@ -69,7 +69,7 @@ class MayaUsdLoader(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): # type: (dict, dict) -> None """Update container with specified representation.""" node = container['objectName'] @@ -78,16 +78,17 @@ class MayaUsdLoader(load.LoaderPlugin): members = cmds.sets(node, query=True) or [] shapes = cmds.ls(members, type="mayaUsdProxyShape") - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) for shape in 
shapes: cmds.setAttr("{}.filePath".format(shape), path, type="string") cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + str(repre_doc["_id"]), type="string") - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): # type: (dict) -> None diff --git a/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd.py b/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd.py index a9ba2b8773..64e6048c31 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd.py @@ -60,7 +60,7 @@ class MultiverseUsdLoader(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): # type: (dict, dict) -> None """Update container with specified representation.""" node = container['objectName'] @@ -70,7 +70,9 @@ class MultiverseUsdLoader(load.LoaderPlugin): shapes = cmds.ls(members, type="mvUsdCompoundShape") assert shapes, "Cannot find mvUsdCompoundShape in container" - project_name = representation["context"]["project"]["name"] + project_name = context["project"]["name"] + repre_doc = context["representation"] + path = get_representation_path(repre_doc) prev_representation_id = cmds.getAttr("{}.representation".format(node)) prev_representation = get_representation_by_id(project_name, prev_representation_id) @@ -89,18 +91,17 @@ class MultiverseUsdLoader(load.LoaderPlugin): "Couldn't find matching path (or too many)" prev_path_idx = asset_paths.index(prev_path) - path = get_representation_path(representation) asset_paths[prev_path_idx] = path multiverse.SetUsdCompoundAssetPaths(shape, asset_paths) cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + str(repre_doc["_id"]), type="string") mel.eval('refreshEditorTemplates;') - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): # type: (dict) -> None diff --git a/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd_over.py b/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd_over.py index d448dc74a8..6de03fe306 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd_over.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd_over.py @@ -71,7 +71,7 @@ class MultiverseUsdOverLoader(load.LoaderPlugin): return container - def update(self, container, representation): + def update(self, container, context): # type: (dict, dict) -> None """Update container with specified representation.""" @@ -88,13 +88,14 @@ class MultiverseUsdOverLoader(load.LoaderPlugin): mvShape = container['mvUsdCompoundShape'] assert mvShape, "Missing mv source" - project_name = representation["context"]["project"]["name"] + project_name = context["project"]["name"] + repre_doc = context["representation"] prev_representation_id = cmds.getAttr("{}.representation".format(node)) prev_representation = get_representation_by_id(project_name, prev_representation_id) prev_path = os.path.normpath(prev_representation["data"]["path"]) - path = get_representation_path(representation) + path = get_representation_path(repre_doc) for shape in shapes: asset_paths = multiverse.GetUsdCompoundAssetPaths(shape) @@ -107,12 +108,12 @@ class 
MultiverseUsdOverLoader(load.LoaderPlugin): multiverse.SetUsdCompoundAssetPaths(shape, asset_paths) cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + str(repre_doc["_id"]), type="string") mel.eval('refreshEditorTemplates;') - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): # type: (dict) -> None diff --git a/client/ayon_core/hosts/maya/plugins/load/load_redshift_proxy.py b/client/ayon_core/hosts/maya/plugins/load/load_redshift_proxy.py index dd378602c9..feb63ae4be 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_redshift_proxy.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_redshift_proxy.py @@ -16,6 +16,7 @@ from ayon_core.hosts.maya.api.lib import ( unique_namespace ) from ayon_core.hosts.maya.api.pipeline import containerise +from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type class RedshiftProxyLoader(load.LoaderPlugin): @@ -32,9 +33,9 @@ class RedshiftProxyLoader(load.LoaderPlugin): def load(self, context, name=None, namespace=None, options=None): """Plugin entry point.""" try: - family = context["representation"]["context"]["family"] + product_type = context["representation"]["context"]["family"] except ValueError: - family = "redshiftproxy" + product_type = "redshiftproxy" asset_name = context['asset']["name"] namespace = namespace or unique_namespace( @@ -59,12 +60,13 @@ class RedshiftProxyLoader(load.LoaderPlugin): # colour the group node project_name = context["project"]["name"] settings = get_project_settings(project_name) - colors = settings['maya']['load']['colors'] - c = colors.get(family) - if c is not None: + color = get_load_color_for_product_type(product_type, settings) + if color is not None: + red, green, blue = color cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1) - cmds.setAttr("{0}.outlinerColor".format(group_node), - c[0], c[1], c[2]) + cmds.setAttr( + "{0}.outlinerColor".format(group_node), red, green, blue + ) return containerise( name=name, @@ -73,7 +75,7 @@ class RedshiftProxyLoader(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): node = container['objectName'] assert cmds.objExists(node), "Missing container" @@ -81,8 +83,8 @@ class RedshiftProxyLoader(load.LoaderPlugin): members = cmds.sets(node, query=True) or [] rs_meshes = cmds.ls(members, type="RedshiftProxyMesh") assert rs_meshes, "Cannot find RedshiftProxyMesh in container" - - filename = get_representation_path(representation) + repre_doc = context["representation"] + filename = get_representation_path(repre_doc) for rs_mesh in rs_meshes: cmds.setAttr("{}.fileName".format(rs_mesh), @@ -91,7 +93,7 @@ class RedshiftProxyLoader(load.LoaderPlugin): # Update metadata cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + str(repre_doc["_id"]), type="string") def remove(self, container): @@ -111,8 +113,8 @@ class RedshiftProxyLoader(load.LoaderPlugin): self.log.warning("Namespace not deleted because it " "still has members: %s", namespace) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def create_rs_proxy(self, name, path): """Creates Redshift Proxies showing a proxy object. 
diff --git a/client/ayon_core/hosts/maya/plugins/load/load_reference.py b/client/ayon_core/hosts/maya/plugins/load/load_reference.py index 36bd2e5969..75f5cee5a5 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_reference.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_reference.py @@ -1,4 +1,3 @@ -import os import difflib import contextlib @@ -6,7 +5,7 @@ from maya import cmds import qargparse from ayon_core.settings import get_project_settings -import ayon_core.hosts.maya.api.plugin +from ayon_core.hosts.maya.api import plugin from ayon_core.hosts.maya.api.lib import ( maintained_selection, get_container_members, @@ -87,7 +86,7 @@ def preserve_modelpanel_cameras(container, log=None): cmds.modelPanel(panel, edit=True, camera=new_camera) -class ReferenceLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): +class ReferenceLoader(plugin.ReferenceLoader): """Reference file""" families = ["model", @@ -117,9 +116,9 @@ class ReferenceLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): import maya.cmds as cmds try: - family = context["representation"]["context"]["family"] + product_type = context["representation"]["context"]["family"] except ValueError: - family = "model" + product_type = "model" project_name = context["project"]["name"] # True by default to keep legacy behaviours @@ -170,8 +169,9 @@ class ReferenceLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): children=True, fullPath=True) or [] - if family not in {"layout", "setdress", - "mayaAscii", "mayaScene"}: + if product_type not in { + "layout", "setdress", "mayaAscii", "mayaScene" + }: # QUESTION Why do we need to exclude these families? with parent_nodes(roots, parent=None): cmds.xform(group_name, zeroTransformPivots=True) @@ -185,14 +185,18 @@ class ReferenceLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): "{}.displayHandle".format(group_name), display_handle ) - colors = settings['maya']['load']['colors'] - c = colors.get(family) - if c is not None: + color = plugin.get_load_color_for_product_type( + product_type, settings + ) + if color is not None: + red, green, blue = color cmds.setAttr("{}.useOutlinerColor".format(group_name), 1) - cmds.setAttr("{}.outlinerColor".format(group_name), - (float(c[0]) / 255), - (float(c[1]) / 255), - (float(c[2]) / 255)) + cmds.setAttr( + "{}.outlinerColor".format(group_name), + red, + green, + blue + ) cmds.setAttr( "{}.displayHandle".format(group_name), display_handle @@ -214,7 +218,7 @@ class ReferenceLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): cmds.setAttr("{}.selectHandleY".format(group_name), cy) cmds.setAttr("{}.selectHandleZ".format(group_name), cz) - if family == "rig": + if product_type == "rig": self._post_process_rig(namespace, context, options) else: if "translate" in options: @@ -227,12 +231,12 @@ class ReferenceLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): *options["translate"]) return new_nodes - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): with preserve_modelpanel_cameras(container, log=self.log): - super(ReferenceLoader, self).update(container, representation) + super(ReferenceLoader, self).update(container, context) # We also want to lock camera transforms on any new cameras in the # reference or for a camera which might have changed names. 
diff --git a/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py b/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py index e77e270663..58f161afc1 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py @@ -84,14 +84,15 @@ class RenderSetupLoader(load.LoaderPlugin): # Already implicitly deleted by Maya upon removing reference pass - def update(self, container, representation): + def update(self, container, context): """Update RenderSetup setting by overwriting existing settings.""" lib.show_message( "Render setup update", "Render setup setting will be overwritten by new version. All " "setting specified by user not included in loaded version " "will be lost.") - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) with open(path, "r") as file: try: renderSetup.instance().decode( @@ -103,10 +104,10 @@ class RenderSetupLoader(load.LoaderPlugin): # Update metadata node = container["objectName"] cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + str(repre_doc["_id"]), type="string") self.log.info("... updated") - def switch(self, container, representation): + def switch(self, container, context): """Switch representations.""" - self.update(container, representation) + self.update(container, context) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py index 98f98330d7..3eec09eb7d 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py @@ -5,6 +5,7 @@ from ayon_core.pipeline import ( load, get_representation_path ) +from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type # TODO aiVolume doesn't automatically set velocity fps correctly, set manual? 
@@ -25,9 +26,9 @@ class LoadVDBtoArnold(load.LoaderPlugin): from ayon_core.hosts.maya.api.lib import unique_namespace try: - family = context["representation"]["context"]["family"] + product_type = context["representation"]["context"]["family"] except ValueError: - family = "vdbcache" + product_type = "vdbcache" # Check if the plugin for arnold is available on the pc try: @@ -50,16 +51,11 @@ class LoadVDBtoArnold(load.LoaderPlugin): project_name = context["project"]["name"] settings = get_project_settings(project_name) - colors = settings['maya']['load']['colors'] - - c = colors.get(family) - if c is not None: + color = get_load_color_for_product_type(product_type, settings) + if color is not None: + red, green, blue = color cmds.setAttr(root + ".useOutlinerColor", 1) - cmds.setAttr(root + ".outlinerColor", - (float(c[0]) / 255), - (float(c[1]) / 255), - (float(c[2]) / 255) - ) + cmds.setAttr(root + ".outlinerColor", red, green, blue) # Create VRayVolumeGrid grid_node = cmds.createNode("aiVolume", @@ -85,11 +81,13 @@ class LoadVDBtoArnold(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from maya import cmds - path = get_representation_path(representation) + repre_doc = context["representation"] + + path = get_representation_path(repre_doc) # Find VRayVolumeGrid members = cmds.sets(container['objectName'], query=True) @@ -97,15 +95,15 @@ class LoadVDBtoArnold(load.LoaderPlugin): assert len(grid_nodes) == 1, "This is a bug" # Update the VRayVolumeGrid - self._set_path(grid_nodes[0], path=path, representation=representation) + self._set_path(grid_nodes[0], path=path, representation=repre_doc) # Update container representation cmds.setAttr(container["objectName"] + ".representation", - str(representation["_id"]), + str(repre_doc["_id"]), type="string") - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_redshift.py b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_redshift.py index 426e85cf7c..f58d9e5565 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_redshift.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_redshift.py @@ -5,6 +5,7 @@ from ayon_core.pipeline import ( load, get_representation_path ) +from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type class LoadVDBtoRedShift(load.LoaderPlugin): @@ -31,9 +32,9 @@ class LoadVDBtoRedShift(load.LoaderPlugin): from ayon_core.hosts.maya.api.lib import unique_namespace try: - family = context["representation"]["context"]["family"] + product_type = context["representation"]["context"]["family"] except ValueError: - family = "vdbcache" + product_type = "vdbcache" # Check if the plugin for redshift is available on the pc try: @@ -69,16 +70,11 @@ class LoadVDBtoRedShift(load.LoaderPlugin): project_name = context["project"]["name"] settings = get_project_settings(project_name) - colors = settings['maya']['load']['colors'] - - c = colors.get(family) - if c is not None: + color = get_load_color_for_product_type(product_type, settings) + if color is not None: + red, green, blue = color cmds.setAttr(root + ".useOutlinerColor", 1) - cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + cmds.setAttr(root + ".outlinerColor", red, green, blue) # 
Create VR volume_node = cmds.createNode("RedshiftVolumeShape", @@ -99,10 +95,11 @@ class LoadVDBtoRedShift(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from maya import cmds - path = get_representation_path(representation) + repre_doc = context["representation"] + path = get_representation_path(repre_doc) # Find VRayVolumeGrid members = cmds.sets(container['objectName'], query=True) @@ -110,11 +107,11 @@ class LoadVDBtoRedShift(load.LoaderPlugin): assert len(grid_nodes) == 1, "This is a bug" # Update the VRayVolumeGrid - self._set_path(grid_nodes[0], path=path, representation=representation) + self._set_path(grid_nodes[0], path=path, representation=repre_doc) # Update container representation cmds.setAttr(container["objectName"] + ".representation", - str(representation["_id"]), + str(repre_doc["_id"]), type="string") def remove(self, container): @@ -133,8 +130,8 @@ class LoadVDBtoRedShift(load.LoaderPlugin): except RuntimeError: pass - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) @staticmethod def _set_path(grid_node, diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_vray.py b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_vray.py index ca0519900b..6e82bbd5e2 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_vray.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_vray.py @@ -5,6 +5,7 @@ from ayon_core.pipeline import ( load, get_representation_path ) +from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type from maya import cmds @@ -94,9 +95,9 @@ class LoadVDBtoVRay(load.LoaderPlugin): ) try: - family = context["representation"]["context"]["family"] + product_type = context["representation"]["context"]["family"] except ValueError: - family = "vdbcache" + product_type = "vdbcache" # Ensure V-ray is loaded with the vrayvolumegrid if not cmds.pluginInfo("vrayformaya", query=True, loaded=True): @@ -129,15 +130,11 @@ class LoadVDBtoVRay(load.LoaderPlugin): project_name = context["project"]["name"] settings = get_project_settings(project_name) - colors = settings['maya']['load']['colors'] - - c = colors.get(family) - if c is not None: + color = get_load_color_for_product_type(product_type, settings) + if color is not None: + red, green, blue = color cmds.setAttr(root + ".useOutlinerColor", 1) - cmds.setAttr(root + ".outlinerColor", - float(c[0]) / 255, - float(c[1]) / 255, - float(c[2]) / 255) + cmds.setAttr(root + ".outlinerColor", red, green, blue) # Create VRayVolumeGrid grid_node = cmds.createNode("VRayVolumeGrid", @@ -257,9 +254,10 @@ class LoadVDBtoVRay(load.LoaderPlugin): restored_mapping, type="string") - def update(self, container, representation): + def update(self, container, context): + repre_doc = context["representation"] - path = get_representation_path(representation) + path = get_representation_path(repre_doc) # Find VRayVolumeGrid members = cmds.sets(container['objectName'], query=True) @@ -272,11 +270,11 @@ class LoadVDBtoVRay(load.LoaderPlugin): # Update container representation cmds.setAttr(container["objectName"] + ".representation", - str(representation["_id"]), + str(repre_doc["_id"]), type="string") - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, 
container): diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vrayproxy.py b/client/ayon_core/hosts/maya/plugins/load/load_vrayproxy.py index 9b36303b64..86ea8004ba 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vrayproxy.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vrayproxy.py @@ -22,6 +22,7 @@ from ayon_core.hosts.maya.api.lib import ( unique_namespace ) from ayon_core.hosts.maya.api.pipeline import containerise +from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type class VRayProxyLoader(load.LoaderPlugin): @@ -48,9 +49,9 @@ class VRayProxyLoader(load.LoaderPlugin): """ try: - family = context["representation"]["context"]["family"] + product_type = context["representation"]["context"]["family"] except ValueError: - family = "vrayproxy" + product_type = "vrayproxy" # get all representations for this version filename = self._get_abc(context["version"]["_id"]) @@ -80,15 +81,12 @@ class VRayProxyLoader(load.LoaderPlugin): # colour the group node project_name = context["project"]["name"] settings = get_project_settings(project_name) - colors = settings['maya']['load']['colors'] - c = colors.get(family) - if c is not None: + color = get_load_color_for_product_type(product_type, settings) + if color is not None: + red, green, blue = color cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1) cmds.setAttr( - "{0}.outlinerColor".format(group_node), - (float(c[0]) / 255), - (float(c[1]) / 255), - (float(c[2]) / 255) + "{0}.outlinerColor".format(group_node), red, green, blue ) return containerise( @@ -98,7 +96,7 @@ class VRayProxyLoader(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): # type: (dict, dict) -> None """Update container with specified representation.""" node = container['objectName'] @@ -109,9 +107,10 @@ class VRayProxyLoader(load.LoaderPlugin): assert vraymeshes, "Cannot find VRayMesh in container" # get all representations for this version + repre_doc = context["representation"] filename = ( - self._get_abc(representation["parent"]) - or get_representation_path(representation) + self._get_abc(repre_doc["parent"]) + or get_representation_path(repre_doc) ) for vray_mesh in vraymeshes: @@ -121,7 +120,7 @@ class VRayProxyLoader(load.LoaderPlugin): # Update metadata cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + str(repre_doc["_id"]), type="string") def remove(self, container): @@ -142,10 +141,10 @@ class VRayProxyLoader(load.LoaderPlugin): self.log.warning("Namespace not deleted because it " "still has members: %s", namespace) - def switch(self, container, representation): + def switch(self, container, context): # type: (dict, dict) -> None """Switch loaded representation.""" - self.update(container, representation) + self.update(container, context) def create_vray_proxy(self, name, filename): # type: (str, str) -> (list, str) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vrayscene.py b/client/ayon_core/hosts/maya/plugins/load/load_vrayscene.py index 92d2b32549..a5bfd9dcc3 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vrayscene.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vrayscene.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -import os import maya.cmds as cmds # noqa from ayon_core.settings import get_project_settings from ayon_core.pipeline import ( @@ -12,6 +11,7 @@ from ayon_core.hosts.maya.api.lib import ( unique_namespace ) from 
ayon_core.hosts.maya.api.pipeline import containerise +from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type class VRaySceneLoader(load.LoaderPlugin): @@ -26,12 +26,11 @@ class VRaySceneLoader(load.LoaderPlugin): color = "orange" def load(self, context, name, namespace, data): - try: - family = context["representation"]["context"]["family"] + product_type = context["representation"]["context"]["family"] except ValueError: - family = "vrayscene_layer" + product_type = "vrayscene_layer" asset_name = context['asset']["name"] namespace = namespace or unique_namespace( @@ -58,14 +56,12 @@ class VRaySceneLoader(load.LoaderPlugin): # colour the group node project_name = context["project"]["name"] settings = get_project_settings(project_name) - colors = settings['maya']['load']['colors'] - c = colors.get(family) - if c is not None: + color = get_load_color_for_product_type(product_type, settings) + if color is not None: + red, green, blue = color cmds.setAttr("{0}.useOutlinerColor".format(root_node), 1) - cmds.setAttr("{0}.outlinerColor".format(root_node), - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) + cmds.setAttr( + "{0}.outlinerColor".format(root_node), red, green, blue ) return containerise( @@ -75,7 +71,7 @@ class VRaySceneLoader(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): node = container['objectName'] assert cmds.objExists(node), "Missing container" @@ -84,7 +80,8 @@ class VRaySceneLoader(load.LoaderPlugin): vraymeshes = cmds.ls(members, type="VRayScene") assert vraymeshes, "Cannot find VRayScene in container" - filename = get_representation_path(representation) + repre_doc = context["representation"] + filename = get_representation_path(repre_doc) for vray_mesh in vraymeshes: cmds.setAttr("{}.FilePath".format(vray_mesh), @@ -93,7 +90,7 @@ class VRaySceneLoader(load.LoaderPlugin): # Update metadata cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + str(repre_doc["_id"]), type="string") def remove(self, container): @@ -113,8 +110,8 @@ class VRaySceneLoader(load.LoaderPlugin): self.log.warning("Namespace not deleted because it " "still has members: %s", namespace) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def create_vray_scene(self, name, filename): """Re-create the structure created by VRay to support vrscenes diff --git a/client/ayon_core/hosts/maya/plugins/load/load_xgen.py b/client/ayon_core/hosts/maya/plugins/load/load_xgen.py index 4c38835350..fdac62a250 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_xgen.py @@ -113,7 +113,7 @@ class XgenLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): ) cmds.setAttr("{}.xgExportAsDelta".format(xgen_palette), True) - def update(self, container, representation): + def update(self, container, context): """Workflow for updating Xgen. - Export changes to delta file.
@@ -147,7 +147,8 @@ class XgenLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): self.set_palette_attributes(xgen_palette, xgen_file, xgd_file) - maya_file = get_representation_path(representation) + repre_doc = context["representation"] + maya_file = get_representation_path(repre_doc) _, extension = os.path.splitext(maya_file) new_xgen_file = maya_file.replace(extension, ".xgen") data_path = "" @@ -173,7 +174,7 @@ class XgenLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): "{}.xgExportAsDelta".format(xgen_palette): False } with attribute_values(attribute_data): - super().update(container, representation) + super().update(container, context) xgenm.applyDelta(xgen_palette.replace("|", ""), xgd_file) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py b/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py index d2fc1c0ab0..1c6423a8d7 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py @@ -13,6 +13,7 @@ from ayon_core.pipeline import ( ) from ayon_core.hosts.maya.api import lib from ayon_core.hosts.maya.api.pipeline import containerise +from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type # Do not reset these values on update but only apply on first load @@ -56,9 +57,9 @@ class YetiCacheLoader(load.LoaderPlugin): """ try: - family = context["representation"]["context"]["family"] + product_type = context["representation"]["context"]["family"] except ValueError: - family = "yeticache" + product_type = "yeticache" # Build namespace asset = context["asset"] @@ -81,16 +82,11 @@ class YetiCacheLoader(load.LoaderPlugin): project_name = context["project"]["name"] settings = get_project_settings(project_name) - colors = settings['maya']['load']['colors'] - - c = colors.get(family) - if c is not None: + color = get_load_color_for_product_type(product_type, settings) + if color is not None: + red, green, blue = color cmds.setAttr(group_node + ".useOutlinerColor", 1) - cmds.setAttr(group_node + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + cmds.setAttr(group_node + ".outlinerColor", red, green, blue) nodes.append(group_node) @@ -126,12 +122,12 @@ class YetiCacheLoader(load.LoaderPlugin): cmds.namespace(removeNamespace=namespace, deleteNamespaceContent=True) - def update(self, container, representation): - + def update(self, container, context): + repre_doc = context["representation"] namespace = container["namespace"] container_node = container["objectName"] - path = get_representation_path(representation) + path = get_representation_path(repre_doc) settings = self.read_settings(path) # Collect scene information of asset @@ -220,11 +216,11 @@ class YetiCacheLoader(load.LoaderPlugin): set_attribute(attr, value, yeti_node) cmds.setAttr("{}.representation".format(container_node), - str(representation["_id"]), + str(repre_doc["_id"]), typ="string") - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) # helper functions def create_namespace(self, asset): diff --git a/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py b/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py index 2572e550e2..310c943198 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py @@ -1,11 +1,10 @@ import maya.cmds as cmds -from ayon_core.settings 
import get_current_project_settings -import ayon_core.hosts.maya.api.plugin +from ayon_core.hosts.maya.api import plugin from ayon_core.hosts.maya.api import lib -class YetiRigLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): +class YetiRigLoader(plugin.ReferenceLoader): """This loader will load Yeti rig.""" families = ["yetiRig"] @@ -41,14 +40,12 @@ class YetiRigLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): groupName=group_name ) - settings = get_current_project_settings() - colors = settings["maya"]["load"]["colors"] - c = colors.get("yetiRig") - if c is not None: + color = plugin.get_load_color_for_product_type("yetiRig") + if color is not None: + red, green, blue = color cmds.setAttr(group_name + ".useOutlinerColor", 1) cmds.setAttr( - group_name + ".outlinerColor", - (float(c[0]) / 255), (float(c[1]) / 255), (float(c[2]) / 255) + group_name + ".outlinerColor", red, green, blue ) self[:] = nodes diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_animation.py b/client/ayon_core/hosts/maya/plugins/publish/collect_animation.py index 26a0a01c8b..2ab6511ece 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_animation.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_animation.py @@ -25,13 +25,14 @@ class CollectAnimationOutputGeometry(pyblish.api.InstancePlugin): def process(self, instance): """Collect the hierarchy nodes""" - family = instance.data["family"] + product_type = instance.data["productType"] out_set = next((i for i in instance.data["setMembers"] if i.endswith("out_SET")), None) if out_set is None: - warning = "Expecting out_SET for instance of family '%s'" % family - self.log.warning(warning) + self.log.warning(( + "Expecting out_SET for instance of product type '{}'" + ).format(product_type)) return members = cmds.ls(cmds.sets(out_set, query=True), long=True) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_history.py b/client/ayon_core/hosts/maya/plugins/publish/collect_history.py index d4e8c6298b..2da74991c0 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_history.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_history.py @@ -10,7 +10,7 @@ class CollectMayaHistory(pyblish.api.InstancePlugin): This removes render layers collected in the history This is separate from Collect Instances so we can target it towards only - specific family types. + specific product types. 
""" diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py b/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py index 0b29851db0..85be15bb7b 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py @@ -11,7 +11,7 @@ class CollectNewInstances(pyblish.api.InstancePlugin): an objectSet and marked with a unique identifier; Identifier: - id (str): "pyblish.avalon.instance" + id (str): "ayon.create.instance" Limitations: - Does not take into account nodes connected to those @@ -28,7 +28,7 @@ class CollectNewInstances(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder hosts = ["maya"] - valid_empty_families = {"workfile", "renderlayer"} + valid_empty_product_types = {"workfile", "renderlayer"} def process(self, instance): @@ -60,7 +60,9 @@ class CollectNewInstances(pyblish.api.InstancePlugin): instance[:] = members_hierarchy - elif instance.data["family"] not in self.valid_empty_families: + elif ( + instance.data["productType"] not in self.valid_empty_product_types + ): self.log.warning("Empty instance: \"%s\" " % objset) # Store the exact members of the object set instance.data["setMembers"] = members diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_model.py b/client/ayon_core/hosts/maya/plugins/publish/collect_model.py index 557f96fe7a..9d45ed63bc 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_model.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_model.py @@ -8,8 +8,11 @@ class CollectModelData(pyblish.api.InstancePlugin): Ensures always only a single frame is extracted (current frame). + Todo: + Validate if is this plugin still useful. + Note: - This is a workaround so that the `pype.model` family can use the + This is a workaround so that the `model` product type can use the same pointcache extractor implementation as animation and pointcaches. This always enforces the "current" frame to be published. diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_render.py b/client/ayon_core/hosts/maya/plugins/publish/collect_render.py index e4221a091c..19e0c133c4 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_render.py @@ -8,18 +8,17 @@ publishing on farm. 
Requires: instance -> families instance -> setMembers + instance -> folderPath context -> currentFile context -> workspaceDir context -> user - session -> AVALON_ASSET - Optional: Provides: instance -> label - instance -> subset + instance -> productName instance -> attachTo instance -> setMembers instance -> publish @@ -27,9 +26,6 @@ Provides: instance -> frameEnd instance -> byFrameStep instance -> renderer - instance -> family - instance -> families - instance -> asset instance -> time instance -> author instance -> source @@ -91,18 +87,18 @@ class CollectMayaRender(pyblish.api.InstancePlugin): ) self.log.warning(msg) - # detect if there are sets (subsets) to attach render to + # detect if there are sets (products) to attach render to sets = cmds.sets(objset, query=True) or [] attach_to = [] for s in sets: - if not cmds.attributeQuery("family", node=s, exists=True): + if not cmds.attributeQuery("productType", node=s, exists=True): continue attach_to.append( { "version": None, # we need integrator for that - "subset": s, - "family": cmds.getAttr("{}.family".format(s)), + "productName": s, + "productType": cmds.getAttr("{}.productType".format(s)), } ) self.log.debug(" -> attach render to: {}".format(s)) @@ -146,13 +142,13 @@ class CollectMayaRender(pyblish.api.InstancePlugin): ) ) - # if we want to attach render to subset, check if we have AOV's + # if we want to attach render to product, check if we have AOV's # in expectedFiles. If so, raise error as we cannot attach AOV - # (considered to be subset on its own) to another subset + # (considered to be product on its own) to another product if attach_to: assert isinstance(expected_files, list), ( "attaching multiple AOVs or renderable cameras to " - "subset is not supported" + "product is not supported" ) # append full path @@ -294,9 +290,7 @@ class CollectMayaRender(pyblish.api.InstancePlugin): "colorspaceView": colorspace_data["view"], } - rr_settings = ( - context.data["system_settings"]["modules"]["royalrender"] - ) + rr_settings = context.data["project_settings"]["royalrender"] if rr_settings["enabled"]: data["rrPathName"] = instance.data.get("rrPathName") self.log.debug(data["rrPathName"]) @@ -304,11 +298,11 @@ class CollectMayaRender(pyblish.api.InstancePlugin): if self.sync_workfile_version: data["version"] = context.data["version"] for _instance in context: - if _instance.data['family'] == "workfile": + if _instance.data["productType"] == "workfile": _instance.data["version"] = context.data["version"] # Define nice label - label = "{0} ({1})".format(layer_name, instance.data["asset"]) + label = "{0} ({1})".format(layer_name, instance.data["folderPath"]) label += " [{0}-{1}]".format( int(data["frameStartHandle"]), int(data["frameEndHandle"]) ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_render_layer_aovs.py b/client/ayon_core/hosts/maya/plugins/publish/collect_render_layer_aovs.py index 585eca5dce..1c83918155 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_render_layer_aovs.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_render_layer_aovs.py @@ -62,12 +62,12 @@ class CollectRenderLayerAOVS(pyblish.api.InstancePlugin): continue pass_name = self.get_pass_name(renderer, element) - render_pass = "%s.%s" % (instance.data["subset"], pass_name) + render_pass = "%s.%s" % (instance.data["productName"], pass_name) result.append(render_pass) self.log.debug("Found {} render elements / AOVs for " - "'{}'".format(len(result), instance.data["subset"])) + "'{}'".format(len(result), 
instance.data["productName"])) instance.data["renderPasses"] = result diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_review.py b/client/ayon_core/hosts/maya/plugins/publish/collect_review.py index 679a21243a..58d02294c5 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_review.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_review.py @@ -39,7 +39,7 @@ class CollectReview(pyblish.api.InstancePlugin): if display_lights == "project_settings": settings = instance.context.data["project_settings"] settings = settings["maya"]["publish"]["ExtractPlayblast"] - settings = settings["capture_preset"]["Viewport Options"] + settings = settings["capture_preset"]["ViewportOptions"] display_lights = settings["displayLights"] # Collect camera focal length. @@ -57,26 +57,26 @@ class CollectReview(pyblish.api.InstancePlugin): burninDataMembers["focalLength"] = focal_length # Account for nested instances like model. - reviewable_subsets = list(set(members) & objectset) - if reviewable_subsets: - if len(reviewable_subsets) > 1: + reviewable_products = list(set(members) & objectset) + if reviewable_products: + if len(reviewable_products) > 1: raise KnownPublishError( - "Multiple attached subsets for review are not supported. " - "Attached: {}".format(", ".join(reviewable_subsets)) + "Multiple attached products for review are not supported. " + "Attached: {}".format(", ".join(reviewable_products)) ) - reviewable_subset = reviewable_subsets[0] + reviewable_product = reviewable_products[0] self.log.debug( - "Subset attached to review: {}".format(reviewable_subset) + "Subset attached to review: {}".format(reviewable_product) ) # Find the relevant publishing instance in the current context reviewable_inst = next(inst for inst in context - if inst.name == reviewable_subset) + if inst.name == reviewable_product) data = reviewable_inst.data self.log.debug( - 'Adding review family to {}'.format(reviewable_subset) + 'Adding review family to {}'.format(reviewable_product) ) if data.get('families'): data['families'].append('review') @@ -119,16 +119,16 @@ class CollectReview(pyblish.api.InstancePlugin): project_name = instance.context.data["projectName"] asset_doc = instance.context.data['assetEntity'] task = instance.context.data["task"] - legacy_subset_name = task + 'Review' + legacy_product_name = task + 'Review' subset_doc = get_subset_by_name( project_name, - legacy_subset_name, + legacy_product_name, asset_doc["_id"], fields=["_id"] ) if subset_doc: - self.log.debug("Existing subsets found, keep legacy name.") - instance.data['subset'] = legacy_subset_name + self.log.debug("Existing products found, keep legacy name.") + instance.data["productName"] = legacy_product_name instance.data["cameras"] = cameras instance.data['review_camera'] = camera diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_vrayproxy.py b/client/ayon_core/hosts/maya/plugins/publish/collect_vrayproxy.py index 24521a2f09..8630f56e58 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_vrayproxy.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_vrayproxy.py @@ -14,7 +14,7 @@ class CollectVrayProxy(pyblish.api.InstancePlugin): def process(self, instance): """Collector entry point.""" - if not instance.data.get('families'): + if not instance.data.get("families"): instance.data["families"] = [] if instance.data.get("vrmesh"): diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_vrayscene.py b/client/ayon_core/hosts/maya/plugins/publish/collect_vrayscene.py index 
0efefe72c7..9548cd9387 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_vrayscene.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_vrayscene.py @@ -1,13 +1,8 @@ # -*- coding: utf-8 -*- """Collect Vray Scene and prepare it for extraction and publishing.""" -import re - -import maya.app.renderSetup.model.renderSetup as renderSetup -from maya import cmds import pyblish.api -from ayon_core.pipeline import legacy_io from ayon_core.lib import get_formatted_current_time from ayon_core.hosts.maya.api import lib @@ -61,8 +56,9 @@ class CollectVrayScene(pyblish.api.InstancePlugin): frame_end_handle = frame_end_render # Get layer specific settings, might be overrides + product_type = "vrayscene_layer" data = { - "subset": layer_name, + "productName": layer_name, "layer": layer_name, # TODO: This likely needs fixing now # Before refactor: cmds.sets(layer, q=True) or ["*"] @@ -79,9 +75,10 @@ class CollectVrayScene(pyblish.api.InstancePlugin): self.get_render_attribute("byFrameStep", layer=layer_name)), "renderer": renderer, - # instance subset - "family": "vrayscene_layer", - "families": ["vrayscene_layer"], + # instance product type + "productType": product_type, + "family": product_type, + "families": [product_type], "time": get_formatted_current_time(), "author": context.data["user"], # Add source to allow tracing back to the scene from @@ -104,7 +101,7 @@ class CollectVrayScene(pyblish.api.InstancePlugin): instance.data.update(data) # Define nice label - label = "{0} ({1})".format(layer_name, instance.data["asset"]) + label = "{0} ({1})".format(layer_name, instance.data["folderPath"]) label += " [{0}-{1}]".format( int(data["frameStartHandle"]), int(data["frameEndHandle"]) ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/determine_future_version.py b/client/ayon_core/hosts/maya/plugins/publish/determine_future_version.py index afa249aca2..47fb4f03fe 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/determine_future_version.py +++ b/client/ayon_core/hosts/maya/plugins/publish/determine_future_version.py @@ -1,8 +1,9 @@ -import pyblish +import pyblish.api + class DetermineFutureVersion(pyblish.api.InstancePlugin): """ - This will determine version of subset if we want render to be attached to. + This will determine version of product if we want render to be attached to. 
""" label = "Determine Subset Version" order = pyblish.api.IntegratorOrder @@ -11,18 +12,25 @@ class DetermineFutureVersion(pyblish.api.InstancePlugin): def process(self, instance): context = instance.context - attach_to_subsets = [s["subset"] for s in instance.data['attachTo']] - - if not attach_to_subsets: + attatch_to_products = [ + i["productName"] + for i in instance.data["attachTo"] + ] + if not attatch_to_products: return for i in context: - if i.data["subset"] in attach_to_subsets: - # # this will get corresponding subset in attachTo list - # # so we can set version there - sub = next(item for item in instance.data['attachTo'] if item["subset"] == i.data["subset"]) # noqa: E501 + if i.data["productName"] not in attatch_to_products: + continue + # # this will get corresponding product in attachTo list + # # so we can set version there + sub = next( + item + for item in instance.data["attachTo"] + if item["productName"] == i.data["productName"] + ) - sub["version"] = i.data.get("version", 1) - self.log.info("render will be attached to {} v{}".format( - sub["subset"], sub["version"] - )) + sub["version"] = i.data.get("version", 1) + self.log.info("render will be attached to {} v{}".format( + sub["productName"], sub["version"] + )) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_camera_alembic.py b/client/ayon_core/hosts/maya/plugins/publish/extract_camera_alembic.py index b9561e299e..74abc8de75 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_camera_alembic.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_camera_alembic.py @@ -1,4 +1,5 @@ import os +import json from maya import cmds @@ -13,15 +14,15 @@ class ExtractCameraAlembic(publish.Extractor, The camera gets baked to world space by default. Only when the instance's `bakeToWorldSpace` is set to False it will include its full hierarchy. - 'camera' family expects only single camera, if multiple cameras are needed, - 'matchmove' is better choice. + 'camera' product type expects only single camera, if multiple cameras + are needed, 'matchmove' is better choice. """ label = "Extract Camera (Alembic)" hosts = ["maya"] families = ["camera", "matchmove"] - bake_attributes = [] + bake_attributes = "[]" def process(self, instance): @@ -95,11 +96,12 @@ class ExtractCameraAlembic(publish.Extractor, job_str += ' -file "{0}"'.format(path) + bake_attributes = json.loads(self.bake_attributes) # bake specified attributes in preset - assert isinstance(self.bake_attributes, (list, tuple)), ( + assert isinstance(bake_attributes, list), ( "Attributes to bake must be specified as a list" ) - for attr in self.bake_attributes: + for attr in bake_attributes: self.log.debug("Adding {} attribute".format(attr)) job_str += " -attr {0}".format(attr) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py b/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py index 8ca1fd9d3a..c4af2914cd 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py @@ -93,8 +93,8 @@ class ExtractCameraMayaScene(publish.Extractor, The cameras gets baked to world space by default. Only when the instance's `bakeToWorldSpace` is set to False it will include its full hierarchy. - 'camera' family expects only single camera, if multiple cameras are needed, - 'matchmove' is better choice. 
+ 'camera' product type expects only single camera, if multiple cameras are + needed, 'matchmove' is better choice. Note: The extracted Maya ascii file gets "massaged" removing the uuid values @@ -112,9 +112,11 @@ class ExtractCameraMayaScene(publish.Extractor, def process(self, instance): """Plugin entry point.""" # get settings - ext_mapping = ( - instance.context.data["project_settings"]["maya"]["ext_mapping"] - ) + maya_settings = instance.context.data["project_settings"]["maya"] + ext_mapping = { + item["name"]: item["value"] + for item in maya_settings["ext_mapping"] + } if ext_mapping: self.log.debug("Looking in settings for scene type ...") # use extension mapping for first family found diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_import_reference.py b/client/ayon_core/hosts/maya/plugins/publish/extract_import_reference.py index 2a43a30b8d..3fb84c8d83 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_import_reference.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_import_reference.py @@ -37,9 +37,11 @@ class ExtractImportReference(publish.Extractor, if not self.is_active(instance.data): return - ext_mapping = ( - instance.context.data["project_settings"]["maya"]["ext_mapping"] - ) + maya_settings = instance.context.data["project_settings"]["maya"] + ext_mapping = { + item["name"]: item["value"] + for item in maya_settings["ext_mapping"] + } if ext_mapping: self.log.debug("Looking in settings for scene type ...") # use extension mapping for first family found diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_layout.py b/client/ayon_core/hosts/maya/plugins/publish/extract_layout.py index f6e663174a..441610b749 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_layout.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_layout.py @@ -67,10 +67,10 @@ class ExtractLayout(publish.Extractor): self.log.debug(representation) version_id = representation.get("parent") - family = representation.get("context").get("family") + product_type = representation.get("context").get("family") json_element = { - "family": family, + "product_type": product_type, "instance_name": cmds.getAttr( "{}.namespace".format(container)), "representation": str(representation_id), diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_look.py b/client/ayon_core/hosts/maya/plugins/publish/extract_look.py index 7f97a7bf82..469608100d 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_look.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_look.py @@ -104,11 +104,10 @@ class TextureProcessor: log = logging.getLogger(self.__class__.__name__) self.log = log - def apply_settings(self, system_settings, project_settings): + def apply_settings(self, project_settings): """Apply OpenPype system/project settings to the TextureProcessor Args: - system_settings (dict): OpenPype system settings project_settings (dict): OpenPype project settings Returns: @@ -250,7 +249,7 @@ class MakeTX(TextureProcessor): super(MakeTX, self).__init__(log=log) self.extra_args = [] - def apply_settings(self, system_settings, project_settings): + def apply_settings(self, project_settings): # Allow extra maketx arguments from project settings args_settings = ( project_settings["maya"]["publish"] @@ -431,9 +430,11 @@ class ExtractLook(publish.Extractor): project settings. 
""" - ext_mapping = ( - instance.context.data["project_settings"]["maya"]["ext_mapping"] - ) + maya_settings = instance.context.data["project_settings"]["maya"] + ext_mapping = { + item["name"]: item["value"] + for item in maya_settings["ext_mapping"] + } if ext_mapping: self.log.debug("Looking in settings for scene type ...") # use extension mapping for first family found @@ -486,8 +487,7 @@ class ExtractLook(publish.Extractor): }.items(): if instance.data.get(key, False): processor = Processor(log=self.log) - processor.apply_settings(context.data["system_settings"], - context.data["project_settings"]) + processor.apply_settings(context.data["project_settings"]) processors.append(processor) if processors: diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_maya_scene_raw.py b/client/ayon_core/hosts/maya/plugins/publish/extract_maya_scene_raw.py index 5045a8d252..2fd4f44449 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_maya_scene_raw.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_maya_scene_raw.py @@ -5,7 +5,11 @@ import os from maya import cmds from ayon_core.hosts.maya.api.lib import maintained_selection -from ayon_core.pipeline import AVALON_CONTAINER_ID, publish +from ayon_core.pipeline import ( + AYON_CONTAINER_ID, + AVALON_CONTAINER_ID, + publish, +) from ayon_core.pipeline.publish import AYONPyblishPluginMixin from ayon_core.lib import BoolDef @@ -43,9 +47,11 @@ class ExtractMayaSceneRaw(publish.Extractor, AYONPyblishPluginMixin): def process(self, instance): """Plugin entry point.""" - ext_mapping = ( - instance.context.data["project_settings"]["maya"]["ext_mapping"] - ) + maya_settings = instance.context.data["project_settings"]["maya"] + ext_mapping = { + item["name"]: item["value"] + for item in maya_settings["ext_mapping"] + } if ext_mapping: self.log.debug("Looking in settings for scene type ...") # use extension mapping for first family found @@ -77,7 +83,7 @@ class ExtractMayaSceneRaw(publish.Extractor, AYONPyblishPluginMixin): selection = members if set(self.add_for_families).intersection( set(instance.data.get("families", []))) or \ - instance.data.get("family") in self.add_for_families: + instance.data.get("productType") in self.add_for_families: selection += self._get_loaded_containers(members) # Perform extraction @@ -134,7 +140,9 @@ class ExtractMayaSceneRaw(publish.Extractor, AYONPyblishPluginMixin): continue id_attr = "{}.id".format(obj_set) - if cmds.getAttr(id_attr) != AVALON_CONTAINER_ID: + if cmds.getAttr(id_attr) not in { + AYON_CONTAINER_ID, AVALON_CONTAINER_ID + }: continue set_content = set(cmds.sets(obj_set, query=True)) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_model.py b/client/ayon_core/hosts/maya/plugins/publish/extract_model.py index b6ae4d537a..543af59e8f 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_model.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_model.py @@ -35,9 +35,11 @@ class ExtractModel(publish.Extractor, if not self.is_active(instance.data): return - ext_mapping = ( - instance.context.data["project_settings"]["maya"]["ext_mapping"] - ) + maya_settings = instance.context.data["project_settings"]["maya"] + ext_mapping = { + item["name"]: item["value"] + for item in maya_settings["ext_mapping"] + } if ext_mapping: self.log.debug("Looking in settings for scene type ...") # use extension mapping for first family found diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_playblast.py 
b/client/ayon_core/hosts/maya/plugins/publish/extract_playblast.py index c019d43b36..a394d880ff 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_playblast.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_playblast.py @@ -43,7 +43,7 @@ class ExtractPlayblast(publish.Extractor): capture_preset = lib.get_capture_preset( task_data.get("name"), task_data.get("type"), - instance.data["subset"], + instance.data["productName"], instance.context.data["project_settings"], self.log ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_redshift_proxy.py b/client/ayon_core/hosts/maya/plugins/publish/extract_redshift_proxy.py index 5600b980d9..9286869c60 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_redshift_proxy.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_redshift_proxy.py @@ -28,7 +28,7 @@ class ExtractRedshiftProxy(publish.Extractor): if not anim_on: # Remove animation information because it is not required for - # non-animated subsets + # non-animated products keys = ["frameStart", "frameEnd", "handleStart", diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_rig.py b/client/ayon_core/hosts/maya/plugins/publish/extract_rig.py index 13e3d7c6b4..305f4698c6 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_rig.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_rig.py @@ -18,9 +18,11 @@ class ExtractRig(publish.Extractor): def process(self, instance): """Plugin entry point.""" - ext_mapping = ( - instance.context.data["project_settings"]["maya"]["ext_mapping"] - ) + maya_settings = instance.context.data["project_settings"]["maya"] + ext_mapping = { + item["name"]: item["value"] + for item in maya_settings["ext_mapping"] + } if ext_mapping: self.log.debug("Looking in settings for scene type ...") # use extension mapping for first family found diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_thumbnail.py b/client/ayon_core/hosts/maya/plugins/publish/extract_thumbnail.py index db26422897..d3140487a6 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_thumbnail.py @@ -27,7 +27,7 @@ class ExtractThumbnail(publish.Extractor): capture_preset = lib.get_capture_preset( task_data.get("name"), task_data.get("type"), - instance.data["subset"], + instance.data["productName"], instance.context.data["project_settings"], self.log ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py index 7b44c92194..edbb5f845e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py @@ -61,7 +61,7 @@ class ExtractUnrealSkeletalMeshFbx(publish.Extractor): # we rely on hierarchy under one root. original_parent = to_extract[0].split("|")[1] - parent_node = instance.data.get("asset") + parent_node = instance.data.get("folderPath") # this needs to be done for AYON # WARNING: since AYON supports duplicity of asset names, # this needs to be refactored throughout the pipeline. 
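Several extractor hunks above (e.g. extract_camera_mayaScene, extract_look, extract_model, extract_rig) repeat the same conversion: AYON project settings now deliver Maya's `ext_mapping` as a list of name/value items instead of the old dict, so each plugin rebuilds the mapping before resolving the scene type. Below is a minimal sketch of that conversion, assuming a hypothetical settings payload; the family names and extensions are illustrative and not taken from the patch.

# Minimal sketch of the recurring ext_mapping conversion (illustrative only).
# The concrete entries are hypothetical; only the list-of-items shape is
# taken from the hunks above.
maya_settings = {
    "ext_mapping": [
        {"name": "model", "value": "ma"},
        {"name": "rig", "value": "ma"},
    ]
}

# Rebuild the dict the plugins previously received directly from settings.
ext_mapping = {
    item["name"]: item["value"]
    for item in maya_settings["ext_mapping"]
}

assert ext_mapping == {"model": "ma", "rig": "ma"}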
diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_vrayproxy.py b/client/ayon_core/hosts/maya/plugins/publish/extract_vrayproxy.py index 28c6e98c33..d16f9e8701 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_vrayproxy.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_vrayproxy.py @@ -27,7 +27,7 @@ class ExtractVRayProxy(publish.Extractor): anim_on = instance.data["animation"] if not anim_on: # Remove animation information because it is not required for - # non-animated subsets + # non-animated products keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", "frameStartHandle", "frameEndHandle"] diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py b/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py index d8b352668a..9aaba532b2 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py @@ -45,7 +45,7 @@ class ExtractWorkfileXgen(publish.Extractor): is_renderlayer = ( "renderlayer" in i.data.get("families", []) or - i.data["family"] == "renderlayer" + i.data["productType"] == "renderlayer" ) return is_renderlayer diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_yeti_rig.py b/client/ayon_core/hosts/maya/plugins/publish/extract_yeti_rig.py index 7387849736..0b67117ebc 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_yeti_rig.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_yeti_rig.py @@ -100,9 +100,11 @@ class ExtractYetiRig(publish.Extractor): def process(self, instance): """Plugin entry point.""" - ext_mapping = ( - instance.context.data["project_settings"]["maya"]["ext_mapping"] - ) + maya_settings = instance.context.data["project_settings"]["maya"] + ext_mapping = { + item["name"]: item["value"] + for item in maya_settings["ext_mapping"] + } if ext_mapping: self.log.debug("Looking in settings for scene type ...") # use extension mapping for first family found diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_animation_content.py b/client/ayon_core/hosts/maya/plugins/publish/validate_animation_content.py index f33ee1a7e7..8cf5c4278e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_animation_content.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_animation_content.py @@ -7,7 +7,7 @@ from ayon_core.pipeline.publish import ( class ValidateAnimationContent(pyblish.api.InstancePlugin): - """Adheres to the content of 'animation' family + """Adheres to the content of 'animation' product type - Must have collected `out_hierarchy` data. - All nodes in `out_hierarchy` must be in the instance. diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_attributes.py b/client/ayon_core/hosts/maya/plugins/publish/validate_attributes.py index 3dfe2f4f2d..1514972159 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_attributes.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_attributes.py @@ -1,3 +1,4 @@ +import json from collections import defaultdict import pyblish.api @@ -23,19 +24,19 @@ class ValidateAttributes(pyblish.api.InstancePlugin, """ order = ValidateContentsOrder - label = "Attributes" + label = "Validate Attributes" hosts = ["maya"] actions = [RepairAction] optional = True - attributes = None + attributes = "{}" def process(self, instance): if not self.is_active(instance.data): return # Check for preset existence. 
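# NOTE (illustrative, not a line of this hunk): 'attributes' is now stored in
# settings as a JSON string and parsed lazily via 'get_attributes_data()'.
# A hypothetical value: '{"pointcache": {"node.attribute": 1}}', i.e. product
# type keys mapping "node.attribute" strings to expected values, which is the
# shape 'get_invalid_attributes' splits on the first "." further below.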
- if not self.attributes: + if not self.get_attributes_data(): return invalid = self.get_invalid(instance, compute=True) @@ -44,6 +45,10 @@ class ValidateAttributes(pyblish.api.InstancePlugin, "Found attributes with invalid values: {}".format(invalid) ) + @classmethod + def get_attributes_data(cls): + return json.loads(cls.attributes) + @classmethod def get_invalid(cls, instance, compute=False): if compute: @@ -55,21 +60,22 @@ class ValidateAttributes(pyblish.api.InstancePlugin, def get_invalid_attributes(cls, instance): invalid_attributes = [] + attributes_data = cls.get_attributes_data() # Filter families. - families = [instance.data["family"]] + families = [instance.data["productType"]] families += instance.data.get("families", []) - families = set(families) & set(cls.attributes.keys()) + families = set(families) & set(attributes_data.keys()) if not families: return [] # Get all attributes to validate. attributes = defaultdict(dict) for family in families: - if family not in cls.attributes: + if family not in attributes_data: # No attributes to validate for family continue - for preset_attr, preset_value in cls.attributes[family].items(): + for preset_attr, preset_value in attributes_data[family].items(): node_name, attribute_name = preset_attr.split(".", 1) attributes[node_name][attribute_name] = preset_value diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_frame_range.py b/client/ayon_core/hosts/maya/plugins/publish/validate_frame_range.py index 85cc606b25..5c5b691f9d 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_frame_range.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_frame_range.py @@ -39,7 +39,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin, "yeticache"] optional = True actions = [RepairAction] - exclude_families = [] + exclude_product_types = [] def process(self, instance): if not self.is_active(instance.data): @@ -73,8 +73,10 @@ class ValidateFrameRange(pyblish.api.InstancePlugin, # compare with data on instance errors = [] - if [ef for ef in self.exclude_families - if instance.data["family"] in ef]: + # QUESTION shouldn't this be just: + # 'if instance.data["productType"] in self.exclude_product_types:' + if [ef for ef in self.exclude_product_types + if instance.data["productType"] in ef]: return if (inst_start != frame_start_handle): errors.append("Instance start frame [ {} ] doesn't " diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_has_members.py b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_has_members.py index 5a530236db..16e04af446 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_has_members.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_has_members.py @@ -26,7 +26,7 @@ class ValidateInstanceHasMembers(pyblish.api.InstancePlugin): def process(self, instance): # Allow renderlayer, rendersetup and workfile to be empty skip_families = {"workfile", "renderlayer", "rendersetup"} - if instance.data.get("family") in skip_families: + if instance.data.get("productType") in skip_families: return invalid = self.get_invalid(instance) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_in_context.py b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_in_context.py index c683c1b30f..43b4f06e3f 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_in_context.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_in_context.py @@ -37,7 +37,7 @@ class 
ValidateInstanceInContext(pyblish.api.InstancePlugin, if not self.is_active(instance.data): return - asset = instance.data.get("asset") + asset = instance.data.get("folderPath") context_asset = self.get_context_asset(instance) if asset != context_asset: raise PublishValidationError( @@ -74,4 +74,4 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin, @staticmethod def get_context_asset(instance): - return instance.context.data["asset"] + return instance.context.data["folderPath"] diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_subset.py b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_subset.py index 4229cfeb55..da3a194e58 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_subset.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_subset.py @@ -14,36 +14,40 @@ allowed = set(string.ascii_lowercase + '_') -def validate_name(subset): - return all(x in allowed for x in subset) +def validate_name(product_name): + return all(x in allowed for x in product_name) class ValidateSubsetName(pyblish.api.InstancePlugin): - """Validates subset name has only valid characters""" + """Validates product name has only valid characters""" order = ValidateContentsOrder families = ["*"] - label = "Subset Name" + label = "Product Name" def process(self, instance): - subset = instance.data.get("subset", None) + product_name = instance.data.get("productName", None) - # Ensure subset data - if subset is None: - raise PublishValidationError("Instance is missing subset " - "name: {0}".format(subset)) + # Ensure product data + if product_name is None: + raise PublishValidationError( + "Instance is missing product name: {0}".format(product_name) + ) - if not isinstance(subset, six.string_types): - raise TypeError("Instance subset name must be string, " - "got: {0} ({1})".format(subset, type(subset))) + if not isinstance(product_name, six.string_types): + raise TypeError(( + "Instance product name must be string, got: {0} ({1})" + ).format(product_name, type(product_name))) - # Ensure is not empty subset - if not subset: - raise ValueError("Instance subset name is " - "empty: {0}".format(subset)) + # Ensure is not empty product + if not product_name: + raise ValueError( + "Instance product name is empty: {0}".format(product_name) + ) - # Validate subset characters - if not validate_name(subset): - raise ValueError("Instance subset name contains invalid " - "characters: {0}".format(subset)) + # Validate product characters + if not validate_name(product_name): + raise ValueError(( + "Instance product name contains invalid characters: {0}" + ).format(product_name)) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py b/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py index 8cc2675dc7..b0db5e435a 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py @@ -10,7 +10,7 @@ from ayon_core.pipeline.publish import ( class ValidateModelContent(pyblish.api.InstancePlugin): - """Adheres to the content of 'model' family + """Adheres to the content of 'model' product type - Must have one top group. 
(configurable) - Must only contain: transforms, meshes and groups diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_model_name.py b/client/ayon_core/hosts/maya/plugins/publish/validate_model_name.py deleted file mode 100644 index 7812877fd3..0000000000 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_model_name.py +++ /dev/null @@ -1,162 +0,0 @@ -# -*- coding: utf-8 -*- -"""Validate model nodes names.""" -import os -import platform -import re - -import gridfs -import pyblish.api -from maya import cmds - -import ayon_core.hosts.maya.api.action -from ayon_core.client.mongo import OpenPypeMongoConnection -from ayon_core.hosts.maya.api.shader_definition_editor import ( - DEFINITION_FILENAME) -from ayon_core.pipeline import legacy_io -from ayon_core.pipeline.publish import ( - OptionalPyblishPluginMixin, PublishValidationError, ValidateContentsOrder) - - -class ValidateModelName(pyblish.api.InstancePlugin, - OptionalPyblishPluginMixin): - """Validate name of model - - starts with (somename)_###_(materialID)_GEO - materialID must be present in list - padding number doesn't have limit - - """ - optional = True - order = ValidateContentsOrder - hosts = ["maya"] - families = ["model"] - label = "Model Name" - actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] - material_file = None - database_file = DEFINITION_FILENAME - - @classmethod - def get_invalid(cls, instance): - """Get invalid nodes.""" - use_db = cls.database - - def is_group(group_name): - """Find out if supplied transform is group or not.""" - try: - children = cmds.listRelatives(group_name, children=True) - for child in children: - if not cmds.ls(child, transforms=True): - return False - return True - except Exception: - return False - - invalid = [] - content_instance = instance.data.get("setMembers", None) - if not content_instance: - cls.log.error("Instance has no nodes!") - return True - pass - - # validate top level group name - assemblies = cmds.ls(content_instance, assemblies=True, long=True) - if len(assemblies) != 1: - cls.log.error("Must have exactly one top group") - return assemblies or True - top_group = assemblies[0] - regex = cls.top_level_regex - r = re.compile(regex) - m = r.match(top_group) - project_name = instance.context.data["projectName"] - current_asset_name = instance.context.data["asset"] - if m is None: - cls.log.error("invalid name on: {}".format(top_group)) - cls.log.error("name doesn't match regex {}".format(regex)) - invalid.append(top_group) - else: - if "asset" in r.groupindex: - if m.group("asset") != current_asset_name: - cls.log.error("Invalid asset name in top level group.") - return top_group - if "subset" in r.groupindex: - if m.group("subset") != instance.data.get("subset"): - cls.log.error("Invalid subset name in top level group.") - return top_group - if "project" in r.groupindex: - if m.group("project") != project_name: - cls.log.error("Invalid project name in top level group.") - return top_group - - descendants = cmds.listRelatives(content_instance, - allDescendents=True, - fullPath=True) or [] - - descendants = cmds.ls(descendants, noIntermediate=True, long=True) - trns = cmds.ls(descendants, long=False, type='transform') - - # filter out groups - filtered = [node for node in trns if not is_group(node)] - - # load shader list file as utf-8 - shaders = [] - if not use_db: - material_file = cls.material_file[platform.system().lower()] - if material_file: - if os.path.isfile(material_file): - shader_file = open(material_file, "r") - shaders = 
shader_file.readlines() - shader_file.close() - else: - cls.log.error("Missing shader name definition file.") - return True - else: - client = OpenPypeMongoConnection.get_mongo_client() - fs = gridfs.GridFS(client[os.getenv("OPENPYPE_DATABASE_NAME")]) - shader_file = fs.find_one({"filename": cls.database_file}) - if not shader_file: - cls.log.error("Missing shader name definition in database.") - return True - shaders = shader_file.read().splitlines() - shader_file.close() - - # strip line endings from list - shaders = [s.rstrip() for s in shaders if s.rstrip()] - - # compile regex for testing names - regex = cls.regex - r = re.compile(regex) - - for obj in filtered: - cls.log.debug("testing: {}".format(obj)) - m = r.match(obj) - if m is None: - cls.log.error("invalid name on: {}".format(obj)) - invalid.append(obj) - else: - # if we have shader files and shader named group is in - # regex, test this group against names in shader file - if "shader" in r.groupindex and shaders: - try: - if not m.group('shader') in shaders: - cls.log.error( - "invalid materialID on: {0} ({1})".format( - obj, m.group('shader'))) - invalid.append(obj) - except IndexError: - # shader named group doesn't match - cls.log.error( - "shader group doesn't match: {}".format(obj)) - invalid.append(obj) - - return invalid - - def process(self, instance): - """Plugin entry point.""" - if not self.is_active(instance.data): - return - - invalid = self.get_invalid(instance) - - if invalid: - raise PublishValidationError( - "Model naming is invalid. See the log.") diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py index bf12def5e9..de86ffe575 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py @@ -3,7 +3,6 @@ import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.client import get_assets from ayon_core.hosts.maya.api import lib -from ayon_core.pipeline import legacy_io from ayon_core.pipeline.publish import ( PublishValidationError, ValidatePipelineOrder) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_plugin_path_attributes.py b/client/ayon_core/hosts/maya/plugins/publish/validate_plugin_path_attributes.py index d672be6fa0..fd71039e30 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_plugin_path_attributes.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_plugin_path_attributes.py @@ -30,14 +30,18 @@ class ValidatePluginPathAttributes(pyblish.api.InstancePlugin): def get_invalid(cls, instance): invalid = list() - file_attrs = cls.attribute + file_attrs = { + item["name"]: item["value"] + for item in cls.attribute + } if not file_attrs: return invalid # Consider only valid node types to avoid "Unknown object type" warning all_node_types = set(cmds.allNodeTypes()) node_types = [ - key for key in file_attrs.keys() + key + for key in file_attrs.keys() if key in all_node_types ] diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_render_image_rule.py b/client/ayon_core/hosts/maya/plugins/publish/validate_render_image_rule.py index 576886072d..384d99df1a 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -55,12 +55,15 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): if staging_dir: 
cls.log.debug( "Staging dir found: \"{}\". Ignoring setting from " - "`project_settings/maya/RenderSettings/" + "`project_settings/maya/render_settings/" "default_render_image_folder`.".format(staging_dir) ) return staging_dir - return instance.context.data.get('project_settings')\ - .get('maya') \ - .get('RenderSettings') \ - .get('default_render_image_folder') + return ( + instance.context.data + ["project_settings"] + ["maya"] + ["render_settings"] + ["default_render_image_folder"] + ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_renderlayer_aovs.py b/client/ayon_core/hosts/maya/plugins/publish/validate_renderlayer_aovs.py index 71cd6d7112..900e5444a9 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_renderlayer_aovs.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_renderlayer_aovs.py @@ -2,14 +2,13 @@ import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.client import get_subset_by_name -from ayon_core.pipeline import legacy_io from ayon_core.pipeline.publish import PublishValidationError class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin): """Validate created AOVs / RenderElement is registered in the database - Each render element is registered as a subset which is formatted based on + Each render element is registered as a product which is formatted based on the render layer and the render element, example: . @@ -32,7 +31,7 @@ class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( - "Found unregistered subsets: {}".format(invalid)) + "Found unregistered products: {}".format(invalid)) def get_invalid(self, instance): invalid = [] @@ -41,7 +40,7 @@ class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin): asset_doc = instance.data["assetEntity"] render_passes = instance.data.get("renderPasses", []) for render_pass in render_passes: - is_valid = self.validate_subset_registered( + is_valid = self.validate_product_registered( project_name, asset_doc, render_pass ) if not is_valid: @@ -49,9 +48,11 @@ class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin): return invalid - def validate_subset_registered(self, project_name, asset_doc, subset_name): - """Check if subset is registered in the database under the asset""" + def validate_product_registered( + self, project_name, asset_doc, product_name + ): + """Check if product is registered in the database under the asset""" return get_subset_by_name( - project_name, subset_name, asset_doc["_id"], fields=["_id"] + project_name, product_name, asset_doc["_id"], fields=["_id"] ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_rendersettings.py b/client/ayon_core/hosts/maya/plugins/publish/validate_rendersettings.py index ed70d81b63..78a247b3f2 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_rendersettings.py @@ -265,7 +265,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): # load validation definitions from settings settings_lights_flag = instance.context.data["project_settings"].get( "maya", {}).get( - "RenderSettings", {}).get( + "render_settings", {}).get( "enable_all_lights", False) instance_lights_flag = instance.data.get("renderSetupIncludeLights") @@ -281,6 +281,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): # if so, compare its value from the one required. 
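# NOTE (illustrative, not a line of this hunk): the items iterated below come
# from 'get_nodes', where the per-renderer AYON settings arrive as a list of
# {"type": <attribute>, "value": [<expected values>]} entries and are turned
# into (attribute, values) pairs before use, e.g. with placeholder names:
# [{"type": "node.attr", "value": ["1"]}] -> [("node.attr", ["1"])].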
for data in cls.get_nodes(instance, renderer): for node in data["nodes"]: + # Why is captured 'PublishValidationError'? How it can be + # raised by 'cmds.getAttr(...)'? try: render_value = cmds.getAttr( "{}.{}".format(node, data["attribute"]) @@ -310,11 +312,16 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): @classmethod def get_nodes(cls, instance, renderer): maya_settings = instance.context.data["project_settings"]["maya"] + renderer_key = "{}_render_attributes".format(renderer) validation_settings = ( maya_settings["publish"]["ValidateRenderSettings"].get( - "{}_render_attributes".format(renderer) - ) or [] - ) + renderer_key + ) + ) or [] + validation_settings = [ + (item["type"], item["value"]) + for item in validation_settings + ] result = [] for attr, values in OrderedDict(validation_settings).items(): values = [convert_to_int_or_float(v) for v in values if v] diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_joints_hidden.py b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_joints_hidden.py index c6b9d23574..bb5ec8353e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_joints_hidden.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_joints_hidden.py @@ -7,6 +7,7 @@ from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, + PublishValidationError ) @@ -38,7 +39,8 @@ class ValidateRigJointsHidden(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise ValueError("Visible joints found: {0}".format(invalid)) + raise PublishValidationError( + "Visible joints found: {0}".format(invalid)) @classmethod def repair(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_shader_name.py b/client/ayon_core/hosts/maya/plugins/publish/validate_shader_name.py index cb7f975535..86ca0ca400 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_shader_name.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_shader_name.py @@ -51,7 +51,7 @@ class ValidateShaderName(pyblish.api.InstancePlugin, descendants = cmds.ls(descendants, noIntermediate=True, long=True) shapes = cmds.ls(descendants, type=["nurbsSurface", "mesh"], long=True) - asset_name = instance.data.get("asset") + asset_name = instance.data.get("folderPath") # Check the number of connected shadingEngines per shape regex_compile = re.compile(cls.regex) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_transform_naming_suffix.py b/client/ayon_core/hosts/maya/plugins/publish/validate_transform_naming_suffix.py index 1f8d6b7470..e565866778 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_transform_naming_suffix.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_transform_naming_suffix.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- """Plugin for validating naming conventions.""" +import json from maya import cmds import pyblish.api @@ -35,29 +36,37 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin, """ order = ValidateContentsOrder - hosts = ['maya'] - families = ['model'] + hosts = ["maya"] + families = ["model"] optional = True - label = 'Suffix Naming Conventions' + label = "Suffix Naming Conventions" actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] - SUFFIX_NAMING_TABLE = {"mesh": ["_GEO", "_GES", "_GEP", "_OSD"], - "nurbsCurve": ["_CRV"], - "nurbsSurface": ["_NRB"], - "locator": ["_LOC"], - "group": ["_GRP"]} + SUFFIX_NAMING_TABLE = json.dumps({ + "mesh": 
["_GEO", "_GES", "_GEP", "_OSD"], + "nurbsCurve": ["_CRV"], + "nurbsSurface": ["_NRB"], + "locator": ["_LOC"], + "group": ["_GRP"] + }) ALLOW_IF_NOT_IN_SUFFIX_TABLE = True @classmethod def get_table_for_invalid(cls): - ss = [] - for k, v in cls.SUFFIX_NAMING_TABLE.items(): - ss.append(" - {}: {}".format(k, ", ".join(v))) + suffix_naming_table = json.loads(cls.SUFFIX_NAMING_TABLE) + ss = [ + " - {}: {}".format(k, ", ".join(v)) + for k, v in suffix_naming_table.items() + ] return "
".join(ss) @staticmethod - def is_valid_name(node_name, shape_type, - SUFFIX_NAMING_TABLE, ALLOW_IF_NOT_IN_SUFFIX_TABLE): + def is_valid_name( + node_name, + shape_type, + suffix_naming_table, + allow_if_not_in_suffix_table + ): """Return whether node's name is correct. The correctness for a transform's suffix is dependent on what @@ -70,18 +79,18 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin, Args: node_name (str): Node name. shape_type (str): Type of node. - SUFFIX_NAMING_TABLE (dict): Mapping dict for suffixes. - ALLOW_IF_NOT_IN_SUFFIX_TABLE (dict): Filter dict. + suffix_naming_table (dict): Mapping dict for suffixes. + allow_if_not_in_suffix_table (bool): Default output. """ - if shape_type not in SUFFIX_NAMING_TABLE: - return ALLOW_IF_NOT_IN_SUFFIX_TABLE - else: - suffices = SUFFIX_NAMING_TABLE[shape_type] - for suffix in suffices: - if node_name.endswith(suffix): - return True - return False + if shape_type not in suffix_naming_table: + return allow_if_not_in_suffix_table + + suffices = suffix_naming_table[shape_type] + for suffix in suffices: + if node_name.endswith(suffix): + return True + return False @classmethod def get_invalid(cls, instance): @@ -91,9 +100,10 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin, instance (:class:`pyblish.api.Instance`): published instance. """ - transforms = cmds.ls(instance, type='transform', long=True) + transforms = cmds.ls(instance, type="transform", long=True) invalid = [] + suffix_naming_table = json.loads(cls.SUFFIX_NAMING_TABLE) for transform in transforms: shapes = cmds.listRelatives(transform, shapes=True, @@ -101,9 +111,12 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin, noIntermediate=True) shape_type = cmds.nodeType(shapes[0]) if shapes else "group" - if not cls.is_valid_name(transform, shape_type, - cls.SUFFIX_NAMING_TABLE, - cls.ALLOW_IF_NOT_IN_SUFFIX_TABLE): + if not cls.is_valid_name( + transform, + shape_type, + suffix_naming_table, + cls.ALLOW_IF_NOT_IN_SUFFIX_TABLE + ): invalid.append(transform) return invalid diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py b/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py index d43e04da60..c9860d27a0 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py @@ -5,8 +5,6 @@ import re import pyblish.api import ayon_core.hosts.maya.api.action -from ayon_core.pipeline import legacy_io -from ayon_core.settings import get_project_settings from ayon_core.pipeline.publish import ( ValidateContentsOrder, OptionalPyblishPluginMixin, @@ -83,7 +81,7 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin, ("_" + cls.static_mesh_prefix) or "", cls.regex_mesh ) sm_r = re.compile(regex_mesh) - if not sm_r.match(instance.data.get("subset")): + if not sm_r.match(instance.data.get("productName")): cls.log.error("Mesh doesn't comply with name validation.") return True diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_visible_only.py b/client/ayon_core/hosts/maya/plugins/publish/validate_visible_only.py index f9e4c9212a..29cf9420a3 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_visible_only.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_visible_only.py @@ -36,7 +36,7 @@ class ValidateAlembicVisibleOnly(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - if instance.data["family"] 
== "animation": + if instance.data["productType"] == "animation": # Special behavior to use the nodes in out_SET nodes = instance.data["out_hierarchy"] else: diff --git a/client/ayon_core/hosts/maya/startup/userSetup.py b/client/ayon_core/hosts/maya/startup/userSetup.py index 882f2df27c..adbbfe4f44 100644 --- a/client/ayon_core/hosts/maya/startup/userSetup.py +++ b/client/ayon_core/hosts/maya/startup/userSetup.py @@ -38,7 +38,7 @@ if explicit_plugins_loading["enabled"]: key = "AYON_OPEN_WORKFILE_POST_INITIALIZATION" if bool(int(os.environ.get(key, "0"))): def _log_and_open(): - path = os.environ["AVALON_LAST_WORKFILE"] + path = os.environ["AYON_LAST_WORKFILE"] print("Opening \"{}\"".format(path)) cmds.file(path, open=True, force=True) cmds.evalDeferred( @@ -46,24 +46,5 @@ if bool(int(os.environ.get(key, "0"))): lowestPriority=True ) -# Build a shelf. -shelf_preset = settings['maya'].get('project_shelf') -if shelf_preset: - icon_path = os.path.join( - os.environ['OPENPYPE_PROJECT_SCRIPTS'], - project_name, - "icons") - icon_path = os.path.abspath(icon_path) - - for i in shelf_preset['imports']: - import_string = "from {} import {}".format(project_name, i) - print(import_string) - exec(import_string) - - cmds.evalDeferred( - "mlib.shelf(name=shelf_preset['name'], iconPath=icon_path," - " preset=shelf_preset)" - ) - print("Finished OpenPype usersetup.") diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/app.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/app.py index d73c5e318f..0969666484 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/app.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/app.py @@ -211,7 +211,7 @@ class MayaLookAssignerWindow(QtWidgets.QWidget): # Collect the looks we want to apply (by name) look_items = self.look_outliner.get_selected_items() - looks = {look["subset"] for look in look_items} + looks = {look["product"] for look in look_items} selection = self.assign_selected.isChecked() asset_nodes = self.asset_outliner.get_nodes(selection=selection) @@ -225,22 +225,28 @@ class MayaLookAssignerWindow(QtWidgets.QWidget): # Assign the first matching look relevant for this asset # (since assigning multiple to the same nodes makes no sense) - assign_look = next((subset for subset in item["looks"] - if subset["name"] in looks), None) + assign_look = next( + ( + subset_doc + for subset_doc in item["looks"] + if subset_doc["name"] in looks + ), + None + ) if not assign_look: self.echo( "{} No matching selected look for {}".format(prefix, asset) ) continue - # Get the latest version of this asset's look subset + # Get the latest version of this asset's look product version = get_last_version_by_subset_id( project_name, assign_look["_id"], fields=["_id"] ) - subset_name = assign_look["name"] + product_name = assign_look["name"] self.echo("{} Assigning {} to {}\t".format( - prefix, subset_name, asset + prefix, product_name, asset )) nodes = item["nodes"] @@ -251,7 +257,7 @@ class MayaLookAssignerWindow(QtWidgets.QWidget): for vp in vray_proxies: if vp in nodes: - vrayproxy_assign_look(vp, subset_name) + vrayproxy_assign_look(vp, product_name) nodes = list(set(nodes).difference(vray_proxies)) else: @@ -266,7 +272,7 @@ class MayaLookAssignerWindow(QtWidgets.QWidget): for standin in arnold_standins: if standin in nodes: - arnold_standin.assign_look(standin, subset_name) + arnold_standin.assign_look(standin, product_name) nodes = list(set(nodes).difference(arnold_standins)) else: diff --git 
a/client/ayon_core/hosts/maya/tools/mayalookassigner/arnold_standin.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/arnold_standin.py index 9c6877fed8..810e1fc88c 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/arnold_standin.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/arnold_standin.py @@ -131,8 +131,8 @@ def shading_engine_assignments(shading_engine, attribute, nodes, assignments): assignments[node].append(assignment) -def assign_look(standin, subset): - log.info("Assigning {} to {}.".format(subset, standin)) +def assign_look(standin, product_name): + log.info("Assigning {} to {}.".format(product_name, standin)) nodes_by_id = get_nodes_by_id(standin) @@ -148,13 +148,13 @@ def assign_look(standin, subset): # Get latest look version version = get_last_version_by_subset_name( project_name, - subset_name=subset, + subset_name=product_name, asset_id=asset_id, fields=["_id"] ) if not version: - log.info("Didn't find last version for subset name {}".format( - subset + log.info("Didn't find last version for product name {}".format( + product_name )) continue diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py index 4d2f1a8443..4375d38316 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py @@ -148,7 +148,7 @@ def create_items_from_nodes(nodes): ) continue - # Collect available look subsets for this asset + # Collect available look products for this asset looks = lib.list_looks(project_name, asset_doc["_id"]) # Collect namespaces the asset is found in diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/models.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/models.py index a252f103ec..4892125954 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/models.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/models.py @@ -88,7 +88,7 @@ class LookModel(models.TreeModel): An item exists of: { - "subset": 'name of subset', + "product": 'name of product', "asset": asset_document } @@ -102,26 +102,30 @@ class LookModel(models.TreeModel): self.beginResetModel() # Collect the assets per look name (from the items of the AssetModel) - look_subsets = defaultdict(list) + look_products = defaultdict(list) for asset_item in items: asset = asset_item["asset"] for look in asset_item["looks"]: - look_subsets[look["name"]].append(asset) + look_products[look["name"]].append(asset) - for subset in sorted(look_subsets.keys()): - assets = look_subsets[subset] + for product_name in sorted(look_products.keys()): + assets = look_products[product_name] # Define nice label without "look" prefix for readability - label = subset if not subset.startswith("look") else subset[4:] + label = ( + product_name + if not product_name.startswith("look") + else product_name[4:] + ) item_node = models.Item() item_node["label"] = label - item_node["subset"] = subset + item_node["product"] = product_name # Amount of matching assets for this look item_node["match"] = len(assets) - # Store the assets that have this subset available + # Store the assets that have this product available item_node["assets"] = assets self.add_child(item_node) diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py index cbd5f7fd5c..df74dcd217 100644 --- 
a/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py @@ -51,13 +51,13 @@ def assign_vrayproxy_shaders(vrayproxy, assignments): index += 1 -def vrayproxy_assign_look(vrayproxy, subset="lookDefault"): +def vrayproxy_assign_look(vrayproxy, product_name="lookDefault"): # type: (str, str) -> None """Assign look to vray proxy. Args: vrayproxy (str): Name of vrayproxy to apply look to. - subset (str): Name of look subset. + product_name (str): Name of look product. Returns: None @@ -82,13 +82,13 @@ def vrayproxy_assign_look(vrayproxy, subset="lookDefault"): # Get latest look version version = get_last_version_by_subset_name( project_name, - subset_name=subset, + subset_name=product_name, asset_id=asset_id, fields=["_id"] ) if not version: - print("Didn't find last version for subset name {}".format( - subset + print("Didn't find last version for product name {}".format( + product_name )) continue diff --git a/client/ayon_core/hosts/nuke/addon.py b/client/ayon_core/hosts/nuke/addon.py index 4ca4408271..8e640624f0 100644 --- a/client/ayon_core/hosts/nuke/addon.py +++ b/client/ayon_core/hosts/nuke/addon.py @@ -1,17 +1,14 @@ import os import platform -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon NUKE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class NukeAddon(OpenPypeModule, IHostAddon): +class NukeAddon(AYONAddon, IHostAddon): name = "nuke" host_name = "nuke" - def initialize(self, module_settings): - self.enabled = True - def add_implementation_envs(self, env, _app): # Add requirements to NUKE_PATH new_nuke_paths = [ diff --git a/client/ayon_core/hosts/nuke/api/__init__.py b/client/ayon_core/hosts/nuke/api/__init__.py index 2536230637..caefba766f 100644 --- a/client/ayon_core/hosts/nuke/api/__init__.py +++ b/client/ayon_core/hosts/nuke/api/__init__.py @@ -13,7 +13,6 @@ from .plugin import ( NukeCreator, NukeWriteCreator, NukeCreatorError, - OpenPypeCreator, get_instance_group_node_childs, get_colorspace_from_node ) @@ -69,7 +68,6 @@ __all__ = ( "NukeCreator", "NukeWriteCreator", "NukeCreatorError", - "OpenPypeCreator", "NukeHost", "get_instance_group_node_childs", "get_colorspace_from_node", diff --git a/client/ayon_core/hosts/nuke/api/lib.py b/client/ayon_core/hosts/nuke/api/lib.py index 2ac33de68e..e304b33dc7 100644 --- a/client/ayon_core/hosts/nuke/api/lib.py +++ b/client/ayon_core/hosts/nuke/api/lib.py @@ -45,6 +45,8 @@ from ayon_core.pipeline import ( get_current_host_name, get_current_project_name, get_current_asset_name, + AYON_INSTANCE_ID, + AVALON_INSTANCE_ID, ) from ayon_core.pipeline.context_tools import ( get_custom_workfile_template_from_session @@ -120,7 +122,7 @@ def deprecated(new_destination): class Context: main_window = None context_action_item = None - project_name = os.getenv("AVALON_PROJECT") + project_name = os.getenv("AYON_PROJECT_NAME") # Workfile related code workfiles_launched = False workfiles_tool_timer = None @@ -434,16 +436,16 @@ def set_avalon_knob_data(node, data=None, prefix="avalon:"): Examples: data = { - 'asset': 'sq020sh0280', - 'family': 'render', - 'subset': 'subsetMain' + 'folderPath': 'sq020sh0280', + 'productType': 'render', + 'productName': 'productMain' } """ data = data or dict() create = OrderedDict() tab_name = NODE_TAB_NAME - editable = ["asset", "subset", "name", "namespace"] + editable = ["folderPath", "productName", "name", "namespace"] existed_knobs = node.knobs() @@ -529,55 +531,6 @@ 
def get_avalon_knob_data(node, prefix="avalon:", create=True): return data -@deprecated -def fix_data_for_node_create(data): - """[DEPRECATED] Fixing data to be used for nuke knobs - """ - for k, v in data.items(): - if isinstance(v, six.text_type): - data[k] = str(v) - if str(v).startswith("0x"): - data[k] = int(v, 16) - return data - - -@deprecated -def add_write_node_legacy(name, **kwarg): - """[DEPRECATED] Adding nuke write node - Arguments: - name (str): nuke node name - kwarg (attrs): data for nuke knobs - Returns: - node (obj): nuke write node - """ - use_range_limit = kwarg.get("use_range_limit", None) - - w = nuke.createNode( - "Write", - "name {}".format(name), - inpanel=False - ) - - w["file"].setValue(kwarg["file"]) - - for k, v in kwarg.items(): - if "frame_range" in k: - continue - log.info([k, v]) - try: - w[k].setValue(v) - except KeyError as e: - log.debug(e) - continue - - if use_range_limit: - w["use_limit"].setValue(True) - w["first"].setValue(kwarg["frame_range"][0]) - w["last"].setValue(kwarg["frame_range"][1]) - - return w - - def add_write_node(name, file_path, knobs, **kwarg): """Adding nuke write node @@ -709,98 +662,18 @@ def get_nuke_imageio_settings(): return get_project_settings(Context.project_name)["nuke"]["imageio"] -@deprecated("ayon_core.hosts.nuke.api.lib.get_nuke_imageio_settings") -def get_created_node_imageio_setting_legacy(nodeclass, creator, subset): - '''[DEPRECATED] Get preset data for dataflow (fileType, compression, bitDepth) - ''' - - assert any([creator, nodeclass]), nuke.message( - "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__)) - - imageio_nodes = get_nuke_imageio_settings()["nodes"] - required_nodes = imageio_nodes["requiredNodes"] - - # HACK: for backward compatibility this needs to be optional - override_nodes = imageio_nodes.get("overrideNodes", []) - - imageio_node = None - for node in required_nodes: - log.info(node) - if ( - nodeclass in node["nukeNodeClass"] - and creator in node["plugins"] - ): - imageio_node = node - break - - log.debug("__ imageio_node: {}".format(imageio_node)) - - # find matching override node - override_imageio_node = None - for onode in override_nodes: - log.info(onode) - if nodeclass not in node["nukeNodeClass"]: - continue - - if creator not in node["plugins"]: - continue - - if ( - onode["subsets"] - and not any( - re.search(s.lower(), subset.lower()) - for s in onode["subsets"] - ) - ): - continue - - override_imageio_node = onode - break - - log.debug("__ override_imageio_node: {}".format(override_imageio_node)) - # add overrides to imageio_node - if override_imageio_node: - # get all knob names in imageio_node - knob_names = [k["name"] for k in imageio_node["knobs"]] - - for oknob in override_imageio_node["knobs"]: - for knob in imageio_node["knobs"]: - # override matching knob name - if oknob["name"] == knob["name"]: - log.debug( - "_ overriding knob: `{}` > `{}`".format( - knob, oknob - )) - if not oknob["value"]: - # remove original knob if no value found in oknob - imageio_node["knobs"].remove(knob) - else: - # override knob value with oknob's - knob["value"] = oknob["value"] - - # add missing knobs into imageio_node - if oknob["name"] not in knob_names: - log.debug( - "_ adding knob: `{}`".format(oknob)) - imageio_node["knobs"].append(oknob) - knob_names.append(oknob["name"]) - - log.info("ImageIO node: {}".format(imageio_node)) - return imageio_node - - -def get_imageio_node_setting(node_class, plugin_name, subset): +def get_imageio_node_setting(node_class, plugin_name, 
product_name): ''' Get preset data for dataflow (fileType, compression, bitDepth) ''' imageio_nodes = get_nuke_imageio_settings()["nodes"] - required_nodes = imageio_nodes["requiredNodes"] + required_nodes = imageio_nodes["required_nodes"] imageio_node = None for node in required_nodes: log.info(node) if ( - node_class in node["nukeNodeClass"] - and plugin_name in node["plugins"] + node_class in node["nuke_node_class"] + and plugin_name in node["plugins"] ): imageio_node = node break @@ -814,7 +687,7 @@ def get_imageio_node_setting(node_class, plugin_name, subset): get_imageio_node_override_setting( node_class, plugin_name, - subset, + product_name, imageio_node["knobs"] ) @@ -823,28 +696,29 @@ def get_imageio_node_setting(node_class, plugin_name, subset): def get_imageio_node_override_setting( - node_class, plugin_name, subset, knobs_settings + node_class, plugin_name, product_name, knobs_settings ): ''' Get imageio node overrides from settings ''' imageio_nodes = get_nuke_imageio_settings()["nodes"] - override_nodes = imageio_nodes["overrideNodes"] + override_nodes = imageio_nodes["override_nodes"] # find matching override node override_imageio_node = None for onode in override_nodes: log.debug("__ onode: {}".format(onode)) - log.debug("__ subset: {}".format(subset)) - if node_class not in onode["nukeNodeClass"]: + log.debug("__ productName: {}".format(product_name)) + if node_class not in onode["nuke_node_class"]: continue if plugin_name not in onode["plugins"]: continue + # TODO change 'subsets' to 'product_names' in settings if ( onode["subsets"] and not any( - re.search(s.lower(), subset.lower()) + re.search(s.lower(), product_name.lower()) for s in onode["subsets"] ) ): @@ -860,26 +734,31 @@ def get_imageio_node_override_setting( knob_names = [k["name"] for k in knobs_settings] for oknob in override_imageio_node["knobs"]: + oknob_name = oknob["name"] + oknob_type = oknob["type"] + oknob_value = oknob[oknob_type] for knob in knobs_settings: - # override matching knob name - if oknob["name"] == knob["name"]: - log.debug( - "_ overriding knob: `{}` > `{}`".format( - knob, oknob - )) - if not oknob["value"]: - # remove original knob if no value found in oknob - knobs_settings.remove(knob) - else: - # override knob value with oknob's - knob["value"] = oknob["value"] - # add missing knobs into imageio_node - if oknob["name"] not in knob_names: - log.debug( - "_ adding knob: `{}`".format(oknob)) + if oknob_name not in knob_names: + log.debug("_ adding knob: `{}`".format(oknob)) knobs_settings.append(oknob) - knob_names.append(oknob["name"]) + knob_names.append(oknob_name) + continue + + if oknob_name != knob["name"]: + continue + + knob_type = knob["type"] + # override matching knob name + log.debug( + "_ overriding knob: `{}` > `{}`".format(knob, oknob) + ) + if not oknob_value: + # remove original knob if no value found in oknob + knobs_settings.remove(knob) + else: + # override knob value with oknob's + knob[knob_type] = oknob_value return knobs_settings @@ -888,7 +767,7 @@ def get_imageio_input_colorspace(filename): ''' Get input file colorspace based on regex in settings. 
''' imageio_regex_inputs = ( - get_nuke_imageio_settings()["regexInputs"]["inputs"]) + get_nuke_imageio_settings()["regex_inputs"]["inputs"]) preset_clrsp = None for regexInput in imageio_regex_inputs: @@ -990,16 +869,16 @@ def check_inventory_versions(): version_docs = get_versions( project_name, version_ids, fields=["_id", "name", "parent"] ) - # Store versions by id and collect subset ids + # Store versions by id and collect product ids version_docs_by_id = {} - subset_ids = set() + product_ids = set() for version_doc in version_docs: version_docs_by_id[version_doc["_id"]] = version_doc - subset_ids.add(version_doc["parent"]) + product_ids.add(version_doc["parent"]) - # Query last versions based on subset ids - last_versions_by_subset_id = get_last_versions( - project_name, subset_ids=subset_ids, fields=["_id", "parent"] + # Query last versions based on product ids + last_versions_by_product_id = get_last_versions( + project_name, subset_ids=product_ids, fields=["_id", "parent"] ) # Loop through collected container nodes and their representation ids @@ -1022,9 +901,9 @@ def check_inventory_versions(): ).format(node.name())) continue - # Get last version based on subset id - subset_id = version_doc["parent"] - last_version = last_versions_by_subset_id[subset_id] + # Get last version based on product id + product_id = version_doc["parent"] + last_version = last_versions_by_product_id[product_id] # Check if last version is same as current version if last_version["_id"] == version_doc["_id"]: color_value = "0x4ecd25ff" @@ -1081,19 +960,19 @@ def version_up_script(): nukescripts.script_and_write_nodes_version_up() -def check_subsetname_exists(nodes, subset_name): +def check_product_name_exists(nodes, product_name): """ Checking if node is not already created to secure there is no duplicity Arguments: nodes (list): list of nuke.Node objects - subset_name (str): name we try to find + product_name (str): name we try to find Returns: bool: True of False """ return next((True for n in nodes - if subset_name in read_avalon_data(n).get("subset", "")), + if product_name in read_avalon_data(n).get("productName", "")), False) @@ -1134,8 +1013,12 @@ def format_anatomy(data): ) data.update(context_data) data.update({ - "subset": data["subset"], - "family": data["family"], + "subset": data["productName"], + "family": data["productType"], + "product": { + "name": data["productName"], + "type": data["productType"], + }, "frame": "#" * padding, }) return anatomy.format(data) @@ -1170,13 +1053,14 @@ def create_prenodes( prev_node, nodes_setting, plugin_name=None, - subset=None, + product_name=None, **kwargs ): last_node = None for_dependency = {} - for name, node in nodes_setting.items(): + for node in nodes_setting: # get attributes + name = node["name"] nodeclass = node["nodeclass"] knobs = node["knobs"] @@ -1193,12 +1077,12 @@ def create_prenodes( "dependent": node["dependent"] } - if all([plugin_name, subset]): + if all([plugin_name, product_name]): # find imageio overrides get_imageio_node_override_setting( now_node.Class(), plugin_name, - subset, + product_name, knobs ) @@ -1238,8 +1122,8 @@ def create_write_node( name (str): name of node data (dict): creator write instance data input (node)[optional]: selected node to connect to - prenodes (dict)[optional]: - nodes to be created before write with dependency + prenodes (Optional[list[dict]]): nodes to be created before write + with dependency review (bool)[optional]: adding review knob farm (bool)[optional]: rendering workflow target kwargs 
            (dict)[optional]: additional key arguments for formatting
@@ -1268,22 +1152,23 @@
     Return:
         node (obj): group node with avalon data as Knobs
     '''
-    prenodes = prenodes or {}
+    prenodes = prenodes or []
 
     # filtering variables
     plugin_name = data["creator"]
-    subset = data["subset"]
+    product_name = data["productName"]
 
     # get knob settings for write node
     imageio_writes = get_imageio_node_setting(
         node_class="Write",
         plugin_name=plugin_name,
-        subset=subset
+        product_name=product_name
     )
 
     for knob in imageio_writes["knobs"]:
         if knob["name"] == "file_type":
-            ext = knob["value"]
+            knob_type = knob["type"]
+            ext = knob[knob_type]
 
     data.update({
         "imageio_writes": imageio_writes,
@@ -1326,7 +1211,7 @@
             prev_node,
             prenodes,
             plugin_name,
-            subset,
+            product_name,
             **kwargs
         )
         if last_prenode:
@@ -1398,291 +1283,17 @@
     # set tile color
     tile_color = next(
         iter(
-            k["value"] for k in imageio_writes["knobs"]
+            k[k["type"]] for k in imageio_writes["knobs"]
             if "tile_color" in k["name"]
         ), [255, 0, 0, 255]
     )
+    new_tile_color = []
+    for c in tile_color:
+        if isinstance(c, float):
+            c = int(c * 255)
+        new_tile_color.append(c)
     GN["tile_color"].setValue(
-        color_gui_to_int(tile_color))
-
-    return GN
-
-
-@deprecated("ayon_core.hosts.nuke.api.lib.create_write_node")
-def create_write_node_legacy(
-    name, data, input=None, prenodes=None,
-    review=True, linked_knobs=None, farm=True
-):
-    ''' Creating write node which is group node
-
-    Arguments:
-        name (str): name of node
-        data (dict): data to be imprinted
-        input (node): selected node to connect to
-        prenodes (list, optional): list of lists, definitions for nodes
-            to be created before write
-        review (bool): adding review knob
-
-    Example:
-        prenodes = [
-            {
-                "nodeName": {
-                    "class": ""  # string
-                    "knobs": [
-                        ("knobName": value),
-                        ...
-                    ],
-                    "dependent": [
-                        following_node_01,
-                        ...
-                    ]
-                }
-            },
-            ...
-        ]
-
-    Return:
-        node (obj): group node with avalon data as Knobs
-    '''
-    knob_overrides = data.get("knobs", [])
-    nodeclass = data["nodeclass"]
-    creator = data["creator"]
-    subset = data["subset"]
-
-    imageio_writes = get_created_node_imageio_setting_legacy(
-        nodeclass, creator, subset
-    )
-    for knob in imageio_writes["knobs"]:
-        if knob["name"] == "file_type":
-            representation = knob["value"]
-
-    host_name = get_current_host_name()
-    try:
-        data.update({
-            "app": host_name,
-            "imageio_writes": imageio_writes,
-            "representation": representation,
-        })
-        anatomy_filled = format_anatomy(data)
-
-    except Exception as e:
-        msg = "problem with resolving anatomy template: {}".format(e)
-        log.error(msg)
-        nuke.message(msg)
-
-    # build file path to workfiles
-    fdir = str(anatomy_filled["work"]["folder"]).replace("\\", "/")
-    fpath = data["fpath_template"].format(
-        work=fdir, version=data["version"], subset=data["subset"],
-        frame=data["frame"],
-        ext=representation
-    )
-
-    # create directory
-    if not os.path.isdir(os.path.dirname(fpath)):
-        log.warning("Path does not exist!
I am creating it.") - os.makedirs(os.path.dirname(fpath)) - - _data = OrderedDict({ - "file": fpath - }) - - # adding dataflow template - log.debug("imageio_writes: `{}`".format(imageio_writes)) - for knob in imageio_writes["knobs"]: - _data[knob["name"]] = knob["value"] - - _data = fix_data_for_node_create(_data) - - log.debug("_data: `{}`".format(_data)) - - if "frame_range" in data.keys(): - _data["frame_range"] = data.get("frame_range", None) - log.debug("_data[frame_range]: `{}`".format(_data["frame_range"])) - - GN = nuke.createNode("Group", "name {}".format(name)) - - prev_node = None - with GN: - if input: - input_name = str(input.name()).replace(" ", "") - # if connected input node was defined - prev_node = nuke.createNode( - "Input", "name {}".format(input_name)) - else: - # generic input node connected to nothing - prev_node = nuke.createNode( - "Input", - "name {}".format("rgba"), - inpanel=False - ) - # creating pre-write nodes `prenodes` - if prenodes: - for node in prenodes: - # get attributes - pre_node_name = node["name"] - klass = node["class"] - knobs = node["knobs"] - dependent = node["dependent"] - - # create node - now_node = nuke.createNode( - klass, - "name {}".format(pre_node_name), - inpanel=False - ) - - # add data to knob - for _knob in knobs: - knob, value = _knob - try: - now_node[knob].value() - except NameError: - log.warning( - "knob `{}` does not exist on node `{}`".format( - knob, now_node["name"].value() - )) - continue - - if not knob and not value: - continue - - log.info((knob, value)) - - if isinstance(value, str): - if "[" in value: - now_node[knob].setExpression(value) - else: - now_node[knob].setValue(value) - - # connect to previous node - if dependent: - if isinstance(dependent, (tuple or list)): - for i, node_name in enumerate(dependent): - input_node = nuke.createNode( - "Input", - "name {}".format(node_name), - inpanel=False - ) - now_node.setInput(1, input_node) - - elif isinstance(dependent, str): - input_node = nuke.createNode( - "Input", - "name {}".format(node_name), - inpanel=False - ) - now_node.setInput(0, input_node) - - else: - now_node.setInput(0, prev_node) - - # switch actual node to previous - prev_node = now_node - - # creating write node - - write_node = now_node = add_write_node_legacy( - "inside_{}".format(name), - **_data - ) - # connect to previous node - now_node.setInput(0, prev_node) - - # switch actual node to previous - prev_node = now_node - - now_node = nuke.createNode("Output", "name Output1", inpanel=False) - - # connect to previous node - now_node.setInput(0, prev_node) - - # imprinting group node - set_avalon_knob_data(GN, data["avalon"]) - add_publish_knob(GN) - add_rendering_knobs(GN, farm) - - if review: - add_review_knob(GN) - - # add divider - GN.addKnob(nuke.Text_Knob('', 'Rendering')) - - # Add linked knobs. 
- linked_knob_names = [] - - # add input linked knobs and create group only if any input - if linked_knobs: - linked_knob_names.append("_grp-start_") - linked_knob_names.extend(linked_knobs) - linked_knob_names.append("_grp-end_") - - linked_knob_names.append("Render") - - for _k_name in linked_knob_names: - if "_grp-start_" in _k_name: - knob = nuke.Tab_Knob( - "rnd_attr", "Rendering attributes", nuke.TABBEGINCLOSEDGROUP) - GN.addKnob(knob) - elif "_grp-end_" in _k_name: - knob = nuke.Tab_Knob( - "rnd_attr_end", "Rendering attributes", nuke.TABENDGROUP) - GN.addKnob(knob) - else: - if "___" in _k_name: - # add divider - GN.addKnob(nuke.Text_Knob("")) - else: - # add linked knob by _k_name - link = nuke.Link_Knob("") - link.makeLink(write_node.name(), _k_name) - link.setName(_k_name) - - # make render - if "Render" in _k_name: - link.setLabel("Render Local") - link.setFlag(0x1000) - GN.addKnob(link) - - # adding write to read button - add_button_write_to_read(GN) - - # adding write to read button - add_button_clear_rendered(GN, os.path.dirname(fpath)) - - # Deadline tab. - add_deadline_tab(GN) - - # open the our Tab as default - GN[_NODE_TAB_NAME].setFlag(0) - - # set tile color - tile_color = _data.get("tile_color", "0xff0000ff") - GN["tile_color"].setValue(tile_color) - - # override knob values from settings - for knob in knob_overrides: - knob_type = knob["type"] - knob_name = knob["name"] - knob_value = knob["value"] - if knob_name not in GN.knobs(): - continue - if not knob_value: - continue - - # set correctly knob types - if knob_type == "string": - knob_value = str(knob_value) - if knob_type == "number": - knob_value = int(knob_value) - if knob_type == "decimal_number": - knob_value = float(knob_value) - if knob_type == "bool": - knob_value = bool(knob_value) - if knob_type in ["2d_vector", "3d_vector", "color", "box"]: - knob_value = list(knob_value) - - GN[knob_name].setValue(knob_value) + color_gui_to_int(new_tile_color)) return GN @@ -1699,42 +1310,32 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs): """ for knob in knob_settings: log.debug("__ knob: {}".format(pformat(knob))) - knob_type = knob["type"] knob_name = knob["name"] - if knob_name not in node.knobs(): continue + knob_type = knob["type"] + knob_value = knob[knob_type] if knob_type == "expression": - knob_expression = knob["expression"] - node[knob_name].setExpression( - knob_expression - ) + node[knob_name].setExpression(knob_value) continue # first deal with formattable knob settings if knob_type == "formatable": - template = knob["template"] - to_type = knob["to_type"] + template = knob_value["template"] + to_type = knob_value["to_type"] try: - _knob_value = template.format( - **kwargs - ) + knob_value = template.format(**kwargs) except KeyError as msg: raise KeyError( "Not able to format expression: {}".format(msg)) # convert value to correct type if to_type == "2d_vector": - knob_value = _knob_value.split(";").split(",") - else: - knob_value = _knob_value + knob_value = knob_value.split(";").split(",") knob_type = to_type - else: - knob_value = knob["value"] - if not knob_value: continue @@ -1745,112 +1346,51 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs): def convert_knob_value_to_correct_type(knob_type, knob_value): - # first convert string types to string - # just to ditch unicode - if isinstance(knob_value, six.text_type): - knob_value = str(knob_value) + # Convert 'text' to string to avoid unicode + if knob_type == "text": + return str(knob_value) - # set correctly knob 
types - if knob_type == "bool": - knob_value = bool(knob_value) - elif knob_type == "decimal_number": - knob_value = float(knob_value) - elif knob_type == "number": - knob_value = int(knob_value) - elif knob_type == "text": - knob_value = knob_value - elif knob_type == "color_gui": - knob_value = color_gui_to_int(knob_value) - elif knob_type in ["2d_vector", "3d_vector", "color", "box"]: - knob_value = [float(val_) for val_ in knob_value] + if knob_type == "boolean": + return bool(knob_value) + + if knob_type == "decimal_number": + return float(knob_value) + + if knob_type == "number": + return int(knob_value) + + if knob_type == "color_gui": + new_color = [] + for value in knob_value: + if isinstance(value, float): + value = int(value * 255) + new_color.append(value) + return color_gui_to_int(new_color) + + if knob_type == "box": + return [ + knob_value["x"], knob_value["y"], + knob_value["r"], knob_value["t"] + ] + + if knob_type == "vector_2d": + return [knob_value["x"], knob_value["y"]] + + if knob_type == "vector_3d": + return [knob_value["x"], knob_value["y"], knob_value["z"]] return knob_value def color_gui_to_int(color_gui): + # Append alpha channel if not present + if len(color_gui) == 3: + color_gui = list(color_gui) + [255] hex_value = ( "0x{0:0>2x}{1:0>2x}{2:0>2x}{3:0>2x}").format(*color_gui) return int(hex_value, 16) -@deprecated -def add_rendering_knobs(node, farm=True): - ''' Adds additional rendering knobs to given node - - Arguments: - node (obj): nuke node object to be fixed - - Return: - node (obj): with added knobs - ''' - knob_options = ["Use existing frames", "Local"] - if farm: - knob_options.append("On farm") - - if "render" not in node.knobs(): - knob = nuke.Enumeration_Knob("render", "", knob_options) - knob.clearFlag(nuke.STARTLINE) - node.addKnob(knob) - return node - - -@deprecated -def add_review_knob(node): - ''' Adds additional review knob to given node - - Arguments: - node (obj): nuke node object to be fixed - - Return: - node (obj): with added knob - ''' - if "review" not in node.knobs(): - knob = nuke.Boolean_Knob("review", "Review") - knob.setValue(True) - node.addKnob(knob) - return node - - -@deprecated -def add_deadline_tab(node): - # TODO: remove this as it is only linked to legacy create - node.addKnob(nuke.Tab_Knob("Deadline")) - - knob = nuke.Int_Knob("deadlinePriority", "Priority") - knob.setValue(50) - node.addKnob(knob) - - knob = nuke.Int_Knob("deadlineChunkSize", "Chunk Size") - knob.setValue(0) - node.addKnob(knob) - - knob = nuke.Int_Knob("deadlineConcurrentTasks", "Concurrent tasks") - # zero as default will get value from Settings during collection - # instead of being an explicit user override, see precollect_write.py - knob.setValue(0) - node.addKnob(knob) - - knob = nuke.Text_Knob("divd", '') - knob.setValue('') - node.addKnob(knob) - - knob = nuke.Boolean_Knob("suspend_publish", "Suspend publish") - knob.setValue(False) - node.addKnob(knob) - - -@deprecated -def get_deadline_knob_names(): - # TODO: remove this as it is only linked to legacy - # validate_write_deadline_tab - return [ - "Deadline", - "deadlineChunkSize", - "deadlinePriority", - "deadlineConcurrentTasks" - ] - - def create_backdrop(label="", color=None, layer=0, nodes=None): """ @@ -1922,10 +1462,7 @@ class WorkfileSettings(object): Context._project_doc = project_doc self._project_name = project_name - self._asset = ( - kwargs.get("asset_name") - or get_current_asset_name() - ) + self._asset = get_current_asset_name() self._asset_entity = get_asset_by_name(project_name, 
self._asset) self._root_node = root_node or nuke.root() self._nodes = self.get_nodes(nodes=nodes) @@ -2014,41 +1551,21 @@ class WorkfileSettings(object): host_name="nuke" ) - workfile_settings = imageio_host["workfile"] viewer_process_settings = imageio_host["viewer"]["viewerProcess"] + workfile_settings = imageio_host["workfile"] + color_management = workfile_settings["color_management"] + native_ocio_config = workfile_settings["native_ocio_config"] if not config_data: - # TODO: backward compatibility for old projects - remove later - # perhaps old project overrides is having it set to older version - # with use of `customOCIOConfigPath` - resolved_path = None - if workfile_settings.get("customOCIOConfigPath"): - unresolved_path = workfile_settings["customOCIOConfigPath"] - ocio_paths = unresolved_path[platform.system().lower()] + # no ocio config found and no custom path used + if self._root_node["colorManagement"].value() \ + not in color_management: + self._root_node["colorManagement"].setValue(color_management) - for ocio_p in ocio_paths: - resolved_path = str(ocio_p).format(**os.environ) - if not os.path.exists(resolved_path): - continue - - if resolved_path: - # set values to root - self._root_node["colorManagement"].setValue("OCIO") - self._root_node["OCIO_config"].setValue("custom") - self._root_node["customOCIOConfigPath"].setValue( - resolved_path) - else: - # no ocio config found and no custom path used - if self._root_node["colorManagement"].value() \ - not in str(workfile_settings["colorManagement"]): - self._root_node["colorManagement"].setValue( - str(workfile_settings["colorManagement"])) - - # second set ocio version - if self._root_node["OCIO_config"].value() \ - not in str(workfile_settings["OCIO_config"]): - self._root_node["OCIO_config"].setValue( - str(workfile_settings["OCIO_config"])) + # second set ocio version + if self._root_node["OCIO_config"].value() \ + not in native_ocio_config: + self._root_node["OCIO_config"].setValue(native_ocio_config) else: # OCIO config path is defined from prelaunch hook @@ -2061,22 +1578,17 @@ class WorkfileSettings(object): residual_path )) - # we dont need the key anymore - workfile_settings.pop("customOCIOConfigPath", None) - workfile_settings.pop("colorManagement", None) - workfile_settings.pop("OCIO_config", None) - # get monitor lut from settings respecting Nuke version differences - monitor_lut = workfile_settings.pop("monitorLut", None) + monitor_lut = workfile_settings["thumbnail_space"] monitor_lut_data = self._get_monitor_settings( - viewer_process_settings, monitor_lut) - - # set monitor related knobs luts (MonitorOut, Thumbnails) - for knob, value_ in monitor_lut_data.items(): - workfile_settings[knob] = value_ + viewer_process_settings, monitor_lut + ) + monitor_lut_data["workingSpaceLUT"] = ( + workfile_settings["working_space"] + ) # then set the rest - for knob, value_ in workfile_settings.items(): + for knob, value_ in monitor_lut_data.items(): # skip unfilled ocio config path # it will be dict in value if isinstance(value_, dict): @@ -2300,12 +1812,16 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies. 
# backward compatibility # TODO: remove this once old avalon data api will be removed avalon_knob_data - and avalon_knob_data.get("id") != "pyblish.avalon.instance" + and avalon_knob_data.get("id") not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + } ): continue elif ( node_data - and node_data.get("id") != "pyblish.avalon.instance" + and node_data.get("id") not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + } ): continue @@ -2325,14 +1841,17 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies. nuke_imageio_writes = None if avalon_knob_data: # establish families - families = [avalon_knob_data["family"]] + product_type = avalon_knob_data.get("productType") + if product_type is None: + product_type = avalon_knob_data["family"] + families = [product_type] if avalon_knob_data.get("families"): families.append(avalon_knob_data.get("families")) nuke_imageio_writes = get_imageio_node_setting( node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], - subset=avalon_knob_data["subset"] + product_name=avalon_knob_data["productName"] ) elif node_data: nuke_imageio_writes = get_write_node_template_attr(node) @@ -2354,25 +1873,8 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies. if not write_node: return - try: - # write all knobs to node - for knob in nuke_imageio_writes["knobs"]: - value = knob["value"] - if isinstance(value, six.text_type): - value = str(value) - if str(value).startswith("0x"): - value = int(value, 16) - - log.debug("knob: {}| value: {}".format( - knob["name"], value - )) - write_node[knob["name"]].setValue(value) - except TypeError: - log.warning( - "Legacy workflow didn't work, switching to current") - - set_node_knobs_from_settings( - write_node, nuke_imageio_writes["knobs"]) + set_node_knobs_from_settings( + write_node, nuke_imageio_writes["knobs"]) def set_reads_colorspace(self, read_clrs_inputs): """ Setting colorspace to Read nodes @@ -2450,7 +1952,7 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies. log.error(_error) log.info("Setting colorspace to read nodes...") - read_clrs_inputs = nuke_colorspace["regexInputs"].get("inputs", []) + read_clrs_inputs = nuke_colorspace["regex_inputs"].get("inputs", []) if read_clrs_inputs: self.set_reads_colorspace(read_clrs_inputs) @@ -2605,7 +2107,7 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies. 
def set_favorites(self): from .utils import set_context_favorites - work_dir = os.getenv("AVALON_WORKDIR") + work_dir = os.getenv("AYON_WORKDIR") asset = get_current_asset_name() favorite_items = OrderedDict() @@ -2648,10 +2150,13 @@ def get_write_node_template_attr(node): identifier = node_data["creator_identifier"] # return template data + product_name = node_data.get("productName") + if product_name is None: + product_name = node_data["subset"] return get_imageio_node_setting( node_class="Write", plugin_name=plugin_names_mapping[identifier], - subset=node_data["subset"] + product_name=product_name ) @@ -2953,7 +2458,7 @@ def process_workfile_builder(): create_fv_on = workfile_builder.get("create_first_version") or None builder_on = workfile_builder.get("builder_on_start") or None - last_workfile_path = os.environ.get("AVALON_LAST_WORKFILE") + last_workfile_path = os.environ.get("AYON_LAST_WORKFILE") # generate first version in file not existing and feature is enabled if create_fv_on and not os.path.exists(last_workfile_path): @@ -3006,72 +2511,6 @@ def start_workfile_template_builder(): log.warning("Template profile not found. Skipping...") -@deprecated -def recreate_instance(origin_node, avalon_data=None): - """Recreate input instance to different data - - Args: - origin_node (nuke.Node): Nuke node to be recreating from - avalon_data (dict, optional): data to be used in new node avalon_data - - Returns: - nuke.Node: newly created node - """ - knobs_wl = ["render", "publish", "review", "ypos", - "use_limit", "first", "last"] - # get data from avalon knobs - data = get_avalon_knob_data( - origin_node) - - # add input data to avalon data - if avalon_data: - data.update(avalon_data) - - # capture all node knobs allowed in op_knobs - knobs_data = {k: origin_node[k].value() - for k in origin_node.knobs() - for key in knobs_wl - if key in k} - - # get node dependencies - inputs = origin_node.dependencies() - outputs = origin_node.dependent() - - # remove the node - nuke.delete(origin_node) - - # create new node - # get appropriate plugin class - creator_plugin = None - for Creator in discover_legacy_creator_plugins(): - if Creator.__name__ == data["creator"]: - creator_plugin = Creator - break - - # create write node with creator - new_node_name = data["subset"] - new_node = creator_plugin(new_node_name, data["asset"]).process() - - # white listed knobs to the new node - for _k, _v in knobs_data.items(): - try: - print(_k, _v) - new_node[_k].setValue(_v) - except Exception as e: - print(e) - - # connect to original inputs - for i, n in enumerate(inputs): - new_node.setInput(i, n) - - # connect to outputs - if len(outputs) > 0: - for dn in outputs: - dn.setInput(0, new_node) - - return new_node - - def add_scripts_menu(): try: from scriptsmenu import launchfornuke @@ -3203,7 +2642,7 @@ class DirmapCache: @classmethod def project_name(cls): if cls._project_name is None: - cls._project_name = os.getenv("AVALON_PROJECT") + cls._project_name = os.getenv("AYON_PROJECT_NAME") return cls._project_name @classmethod diff --git a/client/ayon_core/hosts/nuke/api/pipeline.py b/client/ayon_core/hosts/nuke/api/pipeline.py index bdba0757b6..582df952d3 100644 --- a/client/ayon_core/hosts/nuke/api/pipeline.py +++ b/client/ayon_core/hosts/nuke/api/pipeline.py @@ -18,13 +18,17 @@ from ayon_core.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, register_inventory_action_path, + AYON_INSTANCE_ID, + AVALON_INSTANCE_ID, AVALON_CONTAINER_ID, get_current_asset_name, get_current_task_name, + 
registered_host, ) from ayon_core.pipeline.workfile import BuildWorkfile from ayon_core.tools.utils import host_tools from ayon_core.hosts.nuke import NUKE_ROOT_DIR +from ayon_core.tools.workfile_template_build import open_template_ui from .command import viewer_update_and_undo_stop from .lib import ( @@ -55,6 +59,7 @@ from .workfile_template_builder import ( build_workfile_template, create_placeholder, update_placeholder, + NukeTemplateBuilder, ) from .workio import ( open_file, @@ -176,7 +181,7 @@ def add_nuke_callbacks(): nuke.addOnScriptLoad(WorkfileSettings().set_context_settings) - if nuke_settings["nuke-dirmap"]["enabled"]: + if nuke_settings["dirmap"]["enabled"]: log.info("Added Nuke's dir-mapping callback ...") # Add dirmap for file paths. nuke.addFilenameFilter(dirmap_file_name_filter) @@ -313,7 +318,7 @@ def _install_menu(): lambda: BuildWorkfile().process() ) - menu_template = menu.addMenu("Template Builder") # creating template menu + menu_template = menu.addMenu("Template Builder") menu_template.addCommand( "Build Workfile from template", lambda: build_workfile_template() @@ -321,6 +326,12 @@ def _install_menu(): if not ASSIST: menu_template.addSeparator() + menu_template.addCommand( + "Open template", + lambda: open_template_ui( + NukeTemplateBuilder(registered_host()), get_main_window() + ) + ) menu_template.addCommand( "Create Place Holder", lambda: create_placeholder() @@ -519,7 +530,7 @@ def list_instances(creator_id=None): (list) of dictionaries matching instances format """ instances_by_order = defaultdict(list) - subset_instances = [] + product_instances = [] instance_ids = set() for node in nuke.allNodes(recurseGroups=True): @@ -541,7 +552,9 @@ def list_instances(creator_id=None): if not instance_data: continue - if instance_data["id"] != "pyblish.avalon.instance": + if instance_data["id"] not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: continue if creator_id and instance_data["creator_identifier"] != creator_id: @@ -555,51 +568,59 @@ def list_instances(creator_id=None): else: instance_ids.add(instance_id) - # node name could change, so update subset name data - _update_subset_name_data(instance_data, node) + # node name could change, so update product name data + _update_product_name_data(instance_data, node) if "render_order" not in node.knobs(): - subset_instances.append((node, instance_data)) + product_instances.append((node, instance_data)) continue order = int(node["render_order"].value()) instances_by_order[order].append((node, instance_data)) - # Sort instances based on order attribute or subset name. + # Sort instances based on order attribute or product name. 
# TODO: remove in future Publisher enhanced with sorting ordered_instances = [] for key in sorted(instances_by_order.keys()): - instances_by_subset = defaultdict(list) + instances_by_product = defaultdict(list) for node, data_ in instances_by_order[key]: - instances_by_subset[data_["subset"]].append((node, data_)) - for subkey in sorted(instances_by_subset.keys()): - ordered_instances.extend(instances_by_subset[subkey]) + product_name = data_.get("productName") + if product_name is None: + product_name = data_.get("subset") + instances_by_product[product_name].append((node, data_)) + for subkey in sorted(instances_by_product.keys()): + ordered_instances.extend(instances_by_product[subkey]) - instances_by_subset = defaultdict(list) - for node, data_ in subset_instances: - instances_by_subset[data_["subset"]].append((node, data_)) - for key in sorted(instances_by_subset.keys()): - ordered_instances.extend(instances_by_subset[key]) + instances_by_product = defaultdict(list) + for node, data_ in product_instances: + product_name = data_.get("productName") + if product_name is None: + product_name = data_.get("subset") + instances_by_product[product_name].append((node, data_)) + for key in sorted(instances_by_product.keys()): + ordered_instances.extend(instances_by_product[key]) return ordered_instances -def _update_subset_name_data(instance_data, node): - """Update subset name data in instance data. +def _update_product_name_data(instance_data, node): + """Update product name data in instance data. Args: instance_data (dict): instance creator data node (nuke.Node): nuke node """ - # make sure node name is subset name - old_subset_name = instance_data["subset"] + # make sure node name is product name + old_product_name = instance_data.get("productName") + if old_product_name is None: + old_product_name = instance_data.get("subset") old_variant = instance_data["variant"] - subset_name_root = old_subset_name.replace(old_variant, "") + product_name_root = old_product_name.replace(old_variant, "") - new_subset_name = node.name() - new_variant = new_subset_name.replace(subset_name_root, "") + new_product_name = node.name() + new_variant = new_product_name.replace(product_name_root, "") - instance_data["subset"] = new_subset_name + instance_data["productName"] = new_product_name instance_data["variant"] = new_variant diff --git a/client/ayon_core/hosts/nuke/api/plugin.py b/client/ayon_core/hosts/nuke/api/plugin.py index 4b8ddac167..b36dfc56e6 100644 --- a/client/ayon_core/hosts/nuke/api/plugin.py +++ b/client/ayon_core/hosts/nuke/api/plugin.py @@ -19,7 +19,9 @@ from ayon_core.pipeline import ( CreatorError, Creator as NewCreator, CreatedInstance, - get_current_task_name + get_current_task_name, + AYON_INSTANCE_ID, + AVALON_INSTANCE_ID, ) from ayon_core.pipeline.colorspace import ( get_display_view_colorspace_name, @@ -32,7 +34,7 @@ from ayon_core.lib.transcoding import ( from .lib import ( INSTANCE_DATA_KNOB, Knobby, - check_subsetname_exists, + check_product_name_exists, maintained_selection, get_avalon_knob_data, set_avalon_knob_data, @@ -85,15 +87,15 @@ class NukeCreator(NewCreator): for pass_key in keys: creator_attrs[pass_key] = pre_create_data[pass_key] - def check_existing_subset(self, subset_name): - """Make sure subset name is unique. + def check_existing_product(self, product_name): + """Make sure product name is unique. It search within all nodes recursively - and checks if subset name is found in + and checks if product name is found in any node having instance data knob. 
        Arguments:
-            subset_name (str): Subset name
+            product_name (str): Product name
         """
         for node in nuke.allNodes(recurseGroups=True):
@@ -106,14 +108,14 @@ class NukeCreator(NewCreator):
                 # a node has no instance data
                 continue
 
-            # test if subset name is matching
-            if node_data.get("subset") == subset_name:
+            # test if product name is matching
+            if node_data.get("productName") == product_name:
                 raise NukeCreatorError(
                     (
                         "A publish instance for '{}' already exists "
                         "in nodes! Please change the variant "
                         "name to ensure unique output."
-                    ).format(subset_name)
+                    ).format(product_name)
                 )
 
     def create_instance_node(
@@ -165,22 +167,22 @@ class NukeCreator(NewCreator):
         else:
             self.selected_nodes = []
 
-    def create(self, subset_name, instance_data, pre_create_data):
+    def create(self, product_name, instance_data, pre_create_data):
         # make sure selected nodes are added
         self.set_selected_nodes(pre_create_data)
 
-        # make sure subset name is unique
-        self.check_existing_subset(subset_name)
+        # make sure product name is unique
+        self.check_existing_product(product_name)
 
         try:
             instance_node = self.create_instance_node(
-                subset_name,
+                product_name,
                 node_type=instance_data.pop("node_type", None)
             )
             instance = CreatedInstance(
-                self.family,
-                subset_name,
+                self.product_type,
+                product_name,
                 instance_data,
                 self
             )
@@ -225,10 +227,10 @@ class NukeCreator(NewCreator):
         for created_inst, changes in update_list:
             instance_node = created_inst.transient_data["node"]
 
-            # update instance node name if subset name changed
-            if "subset" in changes.changed_keys:
+            # update instance node name if product name changed
+            if "productName" in changes.changed_keys:
                 instance_node["name"].setValue(
-                    changes["subset"].new_value
+                    changes["productName"].new_value
                 )
 
             # in case node is not existing anymore (user erased it manually)
@@ -269,7 +271,7 @@ class NukeWriteCreator(NukeCreator):
     identifier = "create_write"
     label = "Create Write"
-    family = "write"
+    product_type = "write"
     icon = "sign-out"
 
     def get_linked_knobs(self):
@@ -353,22 +355,22 @@ class NukeWriteCreator(NukeCreator):
             label="Review"
         )
 
-    def create(self, subset_name, instance_data, pre_create_data):
+    def create(self, product_name, instance_data, pre_create_data):
         # make sure selected nodes are added
         self.set_selected_nodes(pre_create_data)
 
-        # make sure subset name is unique
-        self.check_existing_subset(subset_name)
+        # make sure product name is unique
+        self.check_existing_product(product_name)
 
         instance_node = self.create_instance_node(
-            subset_name,
+            product_name,
             instance_data
         )
 
         try:
             instance = CreatedInstance(
-                self.family,
-                subset_name,
+                self.product_type,
+                product_name,
                 instance_data,
                 self
             )
@@ -394,65 +396,25 @@ class NukeWriteCreator(NukeCreator):
         # plugin settings
         plugin_settings = self.get_creator_settings(project_settings)
-
+        temp_rendering_path_template = (
+            plugin_settings.get("temp_rendering_path_template")
+            or self.temp_rendering_path_template
+        )
+        # TODO remove template key replacements
+        temp_rendering_path_template = (
+            temp_rendering_path_template
+            .replace("{product[name]}", "{subset}")
+            .replace("{product[type]}", "{family}")
+            .replace("{task[name]}", "{task}")
+            .replace("{folder[name]}", "{asset}")
+        )
         # individual attributes
         self.instance_attributes = plugin_settings.get(
             "instance_attributes") or self.instance_attributes
         self.prenodes = plugin_settings["prenodes"]
         self.default_variants = plugin_settings.get(
             "default_variants") or self.default_variants
-        self.temp_rendering_path_template = (
plugin_settings.get("temp_rendering_path_template") - or self.temp_rendering_path_template - ) - - -class OpenPypeCreator(LegacyCreator): - """Pype Nuke Creator class wrapper""" - node_color = "0xdfea5dff" - - def __init__(self, *args, **kwargs): - super(OpenPypeCreator, self).__init__(*args, **kwargs) - if check_subsetname_exists( - nuke.allNodes(), - self.data["subset"]): - msg = ("The subset name `{0}` is already used on a node in" - "this workfile.".format(self.data["subset"])) - self.log.error(msg + "\n\nPlease use other subset name!") - raise NameError("`{0}: {1}".format(__name__, msg)) - return - - def process(self): - from nukescripts import autoBackdrop - - instance = None - - if (self.options or {}).get("useSelection"): - - nodes = nuke.selectedNodes() - if not nodes: - nuke.message("Please select nodes that you " - "wish to add to a container") - return - - elif len(nodes) == 1: - # only one node is selected - instance = nodes[0] - - if not instance: - # Not using selection or multiple nodes selected - bckd_node = autoBackdrop() - bckd_node["tile_color"].setValue(int(self.node_color, 16)) - bckd_node["note_font_size"].setValue(24) - bckd_node["label"].setValue("[{}]".format(self.name)) - - instance = bckd_node - - # add avalon knobs - set_avalon_knob_data(instance, self.data) - add_publish_knob(instance) - - return instance + self.temp_rendering_path_template = temp_rendering_path_template def get_instance_group_node_childs(instance): @@ -493,7 +455,7 @@ def get_colorspace_from_node(node): def get_review_presets_config(): settings = get_current_project_settings() review_profiles = ( - settings["global"] + settings["core"] ["publish"] ["ExtractReview"] ["profiles"] @@ -909,8 +871,8 @@ class ExporterReviewMov(ExporterReview): self.log.info( "__ add_custom_tags: `{0}`".format(add_custom_tags)) - subset = self.instance.data["subset"] - self._temp_nodes[subset] = [] + product_name = self.instance.data["productName"] + self._temp_nodes[product_name] = [] # Read node r_node = nuke.createNode("Read") @@ -930,7 +892,9 @@ class ExporterReviewMov(ExporterReview): r_node["raw"].setValue(1) # connect to Read node - self._shift_to_previous_node_and_temp(subset, r_node, "Read... `{}`") + self._shift_to_previous_node_and_temp( + product_name, r_node, "Read... `{}`" + ) # add reformat node reformat_nodes_config = kwargs["reformat_nodes_config"] @@ -944,7 +908,7 @@ class ExporterReviewMov(ExporterReview): # connect in order self._connect_to_above_nodes( - node, subset, "Reposition node... `{}`" + node, product_name, "Reposition node... `{}`" ) # append reformated tag add_tags.append("reformated") @@ -956,7 +920,9 @@ class ExporterReviewMov(ExporterReview): ipn = get_view_process_node() if ipn is not None: # connect to ViewProcess node - self._connect_to_above_nodes(ipn, subset, "ViewProcess... `{}`") + self._connect_to_above_nodes( + ipn, product_name, "ViewProcess... `{}`" + ) if not self.viewer_lut_raw: # OCIODisplay @@ -980,7 +946,9 @@ class ExporterReviewMov(ExporterReview): view=viewer ) - self._connect_to_above_nodes(dag_node, subset, "OCIODisplay... `{}`") + self._connect_to_above_nodes( + dag_node, product_name, "OCIODisplay... `{}`" + ) # Write node write_node = nuke.createNode("Write") self.log.debug("Path: {}".format(self.path)) @@ -1007,8 +975,10 @@ class ExporterReviewMov(ExporterReview): write_node["raw"].setValue(1) # connect write_node.setInput(0, self.previous_node) - self._temp_nodes[subset].append(write_node) - self.log.debug("Write... 
`{}`".format(self._temp_nodes[subset])) + self._temp_nodes[product_name].append(write_node) + self.log.debug("Write... `{}`".format( + self._temp_nodes[product_name]) + ) # ---------- end nodes creation # ---------- render or save to nk @@ -1033,172 +1003,19 @@ class ExporterReviewMov(ExporterReview): self.log.debug("Representation... `{}`".format(self.data)) - self.clean_nodes(subset) + self.clean_nodes(product_name) nuke.scriptSave() return self.data - def _shift_to_previous_node_and_temp(self, subset, node, message): - self._temp_nodes[subset].append(node) + def _shift_to_previous_node_and_temp(self, product_name, node, message): + self._temp_nodes[product_name].append(node) self.previous_node = node - self.log.debug(message.format(self._temp_nodes[subset])) + self.log.debug(message.format(self._temp_nodes[product_name])) - def _connect_to_above_nodes(self, node, subset, message): + def _connect_to_above_nodes(self, node, product_name, message): node.setInput(0, self.previous_node) - self._shift_to_previous_node_and_temp(subset, node, message) - - -@deprecated("ayon_core.hosts.nuke.api.plugin.NukeWriteCreator") -class AbstractWriteRender(OpenPypeCreator): - """Abstract creator to gather similar implementation for Write creators""" - name = "" - label = "" - hosts = ["nuke"] - n_class = "Write" - family = "render" - icon = "sign-out" - defaults = ["Main", "Mask"] - knobs = [] - prenodes = {} - - def __init__(self, *args, **kwargs): - super(AbstractWriteRender, self).__init__(*args, **kwargs) - - data = OrderedDict() - - data["family"] = self.family - data["families"] = self.n_class - - for k, v in self.data.items(): - if k not in data.keys(): - data.update({k: v}) - - self.data = data - self.nodes = nuke.selectedNodes() - - def process(self): - - inputs = [] - outputs = [] - instance = nuke.toNode(self.data["subset"]) - selected_node = None - - # use selection - if (self.options or {}).get("useSelection"): - nodes = self.nodes - - if not (len(nodes) < 2): - msg = ("Select only one node. " - "The node you want to connect to, " - "or tick off `Use selection`") - self.log.error(msg) - nuke.message(msg) - return - - if len(nodes) == 0: - msg = ( - "No nodes selected. 
Please select a single node to connect" - " to or tick off `Use selection`" - ) - self.log.error(msg) - nuke.message(msg) - return - - selected_node = nodes[0] - inputs = [selected_node] - outputs = selected_node.dependent() - - if instance: - if (instance.name() in selected_node.name()): - selected_node = instance.dependencies()[0] - - # if node already exist - if instance: - # collect input / outputs - inputs = instance.dependencies() - outputs = instance.dependent() - selected_node = inputs[0] - # remove old one - nuke.delete(instance) - - # recreate new - write_data = { - "nodeclass": self.n_class, - "families": [self.family], - "avalon": self.data, - "subset": self.data["subset"], - "knobs": self.knobs - } - - # add creator data - creator_data = {"creator": self.__class__.__name__} - self.data.update(creator_data) - write_data.update(creator_data) - - write_node = self._create_write_node( - selected_node, - inputs, - outputs, - write_data - ) - - # relinking to collected connections - for i, input in enumerate(inputs): - write_node.setInput(i, input) - - write_node.autoplace() - - for output in outputs: - output.setInput(0, write_node) - - write_node = self._modify_write_node(write_node) - - return write_node - - def is_legacy(self): - """Check if it needs to run legacy code - - In case where `type` key is missing in single - knob it is legacy project anatomy. - - Returns: - bool: True if legacy - """ - imageio_nodes = get_nuke_imageio_settings()["nodes"] - node = imageio_nodes["requiredNodes"][0] - if "type" not in node["knobs"][0]: - # if type is not yet in project anatomy - return True - elif next(iter( - _k for _k in node["knobs"] - if _k.get("type") == "__legacy__" - ), None): - # in case someone re-saved anatomy - # with old configuration - return True - - @abstractmethod - def _create_write_node(self, selected_node, inputs, outputs, write_data): - """Family dependent implementation of Write node creation - - Args: - selected_node (nuke.Node) - inputs (list of nuke.Node) - input dependencies (what is connected) - outputs (list of nuke.Node) - output dependencies - write_data (dict) - values used to fill Knobs - Returns: - node (nuke.Node): group node with data as Knobs - """ - pass - - @abstractmethod - def _modify_write_node(self, write_node): - """Family dependent modification of created 'write_node' - - Returns: - node (nuke.Node): group node with data as Knobs - """ - pass + self._shift_to_previous_node_and_temp(product_name, node, message) def convert_to_valid_instaces(): @@ -1206,7 +1023,7 @@ def convert_to_valid_instaces(): Also save as new minor version of workfile. 
""" - def family_to_identifier(family): + def product_type_to_identifier(product_type): mapping = { "render": "create_write_render", "prerender": "create_write_prerender", @@ -1218,7 +1035,7 @@ def convert_to_valid_instaces(): "source": "create_source" } - return mapping[family] + return mapping[product_type] from ayon_core.hosts.nuke.api import workio @@ -1265,7 +1082,9 @@ def convert_to_valid_instaces(): if not avalon_knob_data: continue - if avalon_knob_data["id"] != "pyblish.avalon.instance": + if avalon_knob_data["id"] not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: continue transfer_data.update({ @@ -1275,7 +1094,10 @@ def convert_to_valid_instaces(): transfer_data["task"] = task_name - family = avalon_knob_data["family"] + product_type = avalon_knob_data.get("productType") + if product_type is None: + product_type = avalon_knob_data["family"] + # establish families families_ak = avalon_knob_data.get("families", []) @@ -1293,11 +1115,13 @@ def convert_to_valid_instaces(): node["publish"].value()) # add idetifier - transfer_data["creator_identifier"] = family_to_identifier(family) + transfer_data["creator_identifier"] = product_type_to_identifier( + product_type + ) # Add all nodes in group instances. if node.Class() == "Group": - # only alter families for render family + # only alter families for render product type if families_ak and "write" in families_ak.lower(): target = node["render"].value() if target == "Use existing frames": @@ -1348,7 +1172,9 @@ def _remove_old_knobs(node): def exposed_write_knobs(settings, plugin_name, instance_node): - exposed_knobs = settings["nuke"]["create"][plugin_name]["exposed_knobs"] + exposed_knobs = settings["nuke"]["create"][plugin_name].get( + "exposed_knobs", [] + ) if exposed_knobs: instance_node.addKnob(nuke.Text_Knob('', 'Write Knobs')) write_node = nuke.allNodes(group=instance_node, filter="Write")[0] diff --git a/client/ayon_core/hosts/nuke/api/workfile_template_builder.py b/client/ayon_core/hosts/nuke/api/workfile_template_builder.py index 4c15da983e..218ba97dd5 100644 --- a/client/ayon_core/hosts/nuke/api/workfile_template_builder.py +++ b/client/ayon_core/hosts/nuke/api/workfile_template_builder.py @@ -7,7 +7,7 @@ from ayon_core.pipeline.workfile.workfile_template_builder import ( LoadPlaceholderItem, CreatePlaceholderItem, PlaceholderLoadMixin, - PlaceholderCreateMixin + PlaceholderCreateMixin, ) from ayon_core.tools.workfile_template_build import ( WorkfileBuildPlaceholderDialog, diff --git a/client/ayon_core/hosts/nuke/api/workio.py b/client/ayon_core/hosts/nuke/api/workio.py index 98e59eff71..b2445fd3d2 100644 --- a/client/ayon_core/hosts/nuke/api/workio.py +++ b/client/ayon_core/hosts/nuke/api/workio.py @@ -68,7 +68,7 @@ def current_file(): def work_root(session): - work_dir = session["AVALON_WORKDIR"] + work_dir = session["AYON_WORKDIR"] scene_dir = session.get("AVALON_SCENEDIR") if scene_dir: path = os.path.join(work_dir, scene_dir) diff --git a/client/ayon_core/hosts/nuke/plugins/create/convert_legacy.py b/client/ayon_core/hosts/nuke/plugins/create/convert_legacy.py index 815170ac8b..f113bec887 100644 --- a/client/ayon_core/hosts/nuke/plugins/create/convert_legacy.py +++ b/client/ayon_core/hosts/nuke/plugins/create/convert_legacy.py @@ -1,3 +1,4 @@ +from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID from ayon_core.pipeline.create.creator_plugins import SubsetConvertorPlugin from ayon_core.hosts.nuke.api.lib import ( INSTANCE_DATA_KNOB, @@ -34,7 +35,9 @@ class LegacyConverted(SubsetConvertorPlugin): if not 
avalon_knob_data: continue - if avalon_knob_data["id"] != "pyblish.avalon.instance": + if avalon_knob_data["id"] not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: continue # catch and break diff --git a/client/ayon_core/hosts/nuke/plugins/create/create_backdrop.py b/client/ayon_core/hosts/nuke/plugins/create/create_backdrop.py index 530392c635..cefd9501ec 100644 --- a/client/ayon_core/hosts/nuke/plugins/create/create_backdrop.py +++ b/client/ayon_core/hosts/nuke/plugins/create/create_backdrop.py @@ -12,7 +12,7 @@ class CreateBackdrop(NukeCreator): identifier = "create_backdrop" label = "Nukenodes (backdrop)" - family = "nukenodes" + product_type = "nukenodes" icon = "file-archive-o" maintain_selection = True @@ -38,12 +38,12 @@ class CreateBackdrop(NukeCreator): return created_node - def create(self, subset_name, instance_data, pre_create_data): - # make sure subset name is unique - self.check_existing_subset(subset_name) + def create(self, product_name, instance_data, pre_create_data): + # make sure product name is unique + self.check_existing_product(product_name) instance = super(CreateBackdrop, self).create( - subset_name, + product_name, instance_data, pre_create_data ) diff --git a/client/ayon_core/hosts/nuke/plugins/create/create_camera.py b/client/ayon_core/hosts/nuke/plugins/create/create_camera.py index 7ade19d846..764de84dcf 100644 --- a/client/ayon_core/hosts/nuke/plugins/create/create_camera.py +++ b/client/ayon_core/hosts/nuke/plugins/create/create_camera.py @@ -14,7 +14,7 @@ class CreateCamera(NukeCreator): identifier = "create_camera" label = "Camera (3d)" - family = "camera" + product_type = "camera" icon = "camera" # plugin attributes @@ -44,12 +44,12 @@ class CreateCamera(NukeCreator): return created_node - def create(self, subset_name, instance_data, pre_create_data): - # make sure subset name is unique - self.check_existing_subset(subset_name) + def create(self, product_name, instance_data, pre_create_data): + # make sure product name is unique + self.check_existing_product(product_name) instance = super(CreateCamera, self).create( - subset_name, + product_name, instance_data, pre_create_data ) diff --git a/client/ayon_core/hosts/nuke/plugins/create/create_gizmo.py b/client/ayon_core/hosts/nuke/plugins/create/create_gizmo.py index 51c5b1931b..ccc6aa13bd 100644 --- a/client/ayon_core/hosts/nuke/plugins/create/create_gizmo.py +++ b/client/ayon_core/hosts/nuke/plugins/create/create_gizmo.py @@ -11,7 +11,7 @@ class CreateGizmo(NukeCreator): identifier = "create_gizmo" label = "Gizmo (group)" - family = "gizmo" + product_type = "gizmo" icon = "file-archive-o" default_variants = ["ViewerInput", "Lut", "Effect"] @@ -42,12 +42,12 @@ class CreateGizmo(NukeCreator): return created_node - def create(self, subset_name, instance_data, pre_create_data): - # make sure subset name is unique - self.check_existing_subset(subset_name) + def create(self, product_name, instance_data, pre_create_data): + # make sure product name is unique + self.check_existing_product(product_name) instance = super(CreateGizmo, self).create( - subset_name, + product_name, instance_data, pre_create_data ) diff --git a/client/ayon_core/hosts/nuke/plugins/create/create_model.py b/client/ayon_core/hosts/nuke/plugins/create/create_model.py index db927171cd..507b7a1b57 100644 --- a/client/ayon_core/hosts/nuke/plugins/create/create_model.py +++ b/client/ayon_core/hosts/nuke/plugins/create/create_model.py @@ -11,7 +11,7 @@ class CreateModel(NukeCreator): identifier = "create_model" label = "Model (3d)" - family 
= "model" + product_type = "model" icon = "cube" default_variants = ["Main"] @@ -42,12 +42,12 @@ class CreateModel(NukeCreator): return created_node - def create(self, subset_name, instance_data, pre_create_data): - # make sure subset name is unique - self.check_existing_subset(subset_name) + def create(self, product_name, instance_data, pre_create_data): + # make sure product name is unique + self.check_existing_product(product_name) instance = super(CreateModel, self).create( - subset_name, + product_name, instance_data, pre_create_data ) diff --git a/client/ayon_core/hosts/nuke/plugins/create/create_source.py b/client/ayon_core/hosts/nuke/plugins/create/create_source.py index be9fa44929..ac6b8f694b 100644 --- a/client/ayon_core/hosts/nuke/plugins/create/create_source.py +++ b/client/ayon_core/hosts/nuke/plugins/create/create_source.py @@ -17,7 +17,7 @@ class CreateSource(NukeCreator): identifier = "create_source" label = "Source (read)" - family = "source" + product_type = "source" icon = "film" default_variants = ["Effect", "Backplate", "Fire", "Smoke"] @@ -35,7 +35,7 @@ class CreateSource(NukeCreator): return read_node - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): # make sure selected nodes are added self.set_selected_nodes(pre_create_data) @@ -46,18 +46,18 @@ class CreateSource(NukeCreator): continue node_name = read_node.name() - _subset_name = subset_name + node_name + _product_name = product_name + node_name - # make sure subset name is unique - self.check_existing_subset(_subset_name) + # make sure product name is unique + self.check_existing_product(_product_name) instance_node = self.create_instance_node( - _subset_name, + _product_name, read_node ) instance = CreatedInstance( - self.family, - _subset_name, + self.product_type, + _product_name, instance_data, self ) diff --git a/client/ayon_core/hosts/nuke/plugins/create/create_write_image.py b/client/ayon_core/hosts/nuke/plugins/create/create_write_image.py index 125cf057f8..770726e34f 100644 --- a/client/ayon_core/hosts/nuke/plugins/create/create_write_image.py +++ b/client/ayon_core/hosts/nuke/plugins/create/create_write_image.py @@ -18,7 +18,7 @@ from ayon_core.hosts.nuke.api.plugin import exposed_write_knobs class CreateWriteImage(napi.NukeWriteCreator): identifier = "create_write_image" label = "Image (write)" - family = "image" + product_type = "image" icon = "sign-out" instance_attributes = [ @@ -64,18 +64,18 @@ class CreateWriteImage(napi.NukeWriteCreator): default=nuke.frame() ) - def create_instance_node(self, subset_name, instance_data): + def create_instance_node(self, product_name, instance_data): # add fpath_template write_data = { "creator": self.__class__.__name__, - "subset": subset_name, + "productName": product_name, "fpath_template": self.temp_rendering_path_template } write_data.update(instance_data) created_node = napi.create_write_node( - subset_name, + product_name, write_data, input=self.selected_node, prenodes=self.prenodes, @@ -91,8 +91,8 @@ class CreateWriteImage(napi.NukeWriteCreator): return created_node - def create(self, subset_name, instance_data, pre_create_data): - subset_name = subset_name.format(**pre_create_data) + def create(self, product_name, instance_data, pre_create_data): + product_name = product_name.format(**pre_create_data) # pass values from precreate to instance self.pass_pre_attributes_to_instance( @@ -107,18 +107,18 @@ class CreateWriteImage(napi.NukeWriteCreator): # make sure selected nodes 
are added self.set_selected_nodes(pre_create_data) - # make sure subset name is unique - self.check_existing_subset(subset_name) + # make sure product name is unique + self.check_existing_product(product_name) instance_node = self.create_instance_node( - subset_name, + product_name, instance_data, ) try: instance = CreatedInstance( - self.family, - subset_name, + self.product_type, + product_name, instance_data, self ) diff --git a/client/ayon_core/hosts/nuke/plugins/create/create_write_prerender.py b/client/ayon_core/hosts/nuke/plugins/create/create_write_prerender.py index 371ef85a15..96ac2fac9c 100644 --- a/client/ayon_core/hosts/nuke/plugins/create/create_write_prerender.py +++ b/client/ayon_core/hosts/nuke/plugins/create/create_write_prerender.py @@ -15,7 +15,7 @@ from ayon_core.hosts.nuke.api.plugin import exposed_write_knobs class CreateWritePrerender(napi.NukeWriteCreator): identifier = "create_write_prerender" label = "Prerender (write)" - family = "prerender" + product_type = "prerender" icon = "sign-out" instance_attributes = [ @@ -45,11 +45,11 @@ class CreateWritePrerender(napi.NukeWriteCreator): ] return attr_defs - def create_instance_node(self, subset_name, instance_data): + def create_instance_node(self, product_name, instance_data): # add fpath_template write_data = { "creator": self.__class__.__name__, - "subset": subset_name, + "productName": product_name, "fpath_template": self.temp_rendering_path_template } @@ -64,7 +64,7 @@ class CreateWritePrerender(napi.NukeWriteCreator): width, height = (actual_format.width(), actual_format.height()) created_node = napi.create_write_node( - subset_name, + product_name, write_data, input=self.selected_node, prenodes=self.prenodes, @@ -81,7 +81,7 @@ class CreateWritePrerender(napi.NukeWriteCreator): return created_node - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): # pass values from precreate to instance self.pass_pre_attributes_to_instance( instance_data, @@ -94,18 +94,18 @@ class CreateWritePrerender(napi.NukeWriteCreator): # make sure selected nodes are added self.set_selected_nodes(pre_create_data) - # make sure subset name is unique - self.check_existing_subset(subset_name) + # make sure product name is unique + self.check_existing_product(product_name) instance_node = self.create_instance_node( - subset_name, + product_name, instance_data ) try: instance = CreatedInstance( - self.family, - subset_name, + self.product_type, + product_name, instance_data, self ) diff --git a/client/ayon_core/hosts/nuke/plugins/create/create_write_render.py b/client/ayon_core/hosts/nuke/plugins/create/create_write_render.py index c5f4d5003a..24bddb3d26 100644 --- a/client/ayon_core/hosts/nuke/plugins/create/create_write_render.py +++ b/client/ayon_core/hosts/nuke/plugins/create/create_write_render.py @@ -15,7 +15,7 @@ from ayon_core.hosts.nuke.api.plugin import exposed_write_knobs class CreateWriteRender(napi.NukeWriteCreator): identifier = "create_write_render" label = "Render (write)" - family = "render" + product_type = "render" icon = "sign-out" instance_attributes = [ @@ -39,11 +39,11 @@ class CreateWriteRender(napi.NukeWriteCreator): ] return attr_defs - def create_instance_node(self, subset_name, instance_data): + def create_instance_node(self, product_name, instance_data): # add fpath_template write_data = { "creator": self.__class__.__name__, - "subset": subset_name, + "productName": product_name, "fpath_template": self.temp_rendering_path_template } @@ 
-61,7 +61,7 @@ class CreateWriteRender(napi.NukeWriteCreator): self.log.debug(">>>>>>> : {}".format(self.get_linked_knobs())) created_node = napi.create_write_node( - subset_name, + product_name, write_data, input=self.selected_node, prenodes=self.prenodes, @@ -76,7 +76,7 @@ class CreateWriteRender(napi.NukeWriteCreator): return created_node - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): # pass values from precreate to instance self.pass_pre_attributes_to_instance( instance_data, @@ -88,18 +88,18 @@ class CreateWriteRender(napi.NukeWriteCreator): # make sure selected nodes are added self.set_selected_nodes(pre_create_data) - # make sure subset name is unique - self.check_existing_subset(subset_name) + # make sure product name is unique + self.check_existing_product(product_name) instance_node = self.create_instance_node( - subset_name, + product_name, instance_data ) try: instance = CreatedInstance( - self.family, - subset_name, + self.product_type, + product_name, instance_data, self ) diff --git a/client/ayon_core/hosts/nuke/plugins/create/workfile_creator.py b/client/ayon_core/hosts/nuke/plugins/create/workfile_creator.py index 87f62b011e..0a0467787a 100644 --- a/client/ayon_core/hosts/nuke/plugins/create/workfile_creator.py +++ b/client/ayon_core/hosts/nuke/plugins/create/workfile_creator.py @@ -13,7 +13,7 @@ import nuke class WorkfileCreator(AutoCreator): identifier = "workfile" - family = "workfile" + product_type = "workfile" default_variant = "Main" @@ -32,9 +32,12 @@ class WorkfileCreator(AutoCreator): host_name = self.create_context.host_name asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - self.default_variant, task_name, asset_doc, - project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + self.default_variant, + host_name, ) instance_data.update({ "asset": asset_name, @@ -42,12 +45,16 @@ class WorkfileCreator(AutoCreator): "variant": self.default_variant }) instance_data.update(self.get_dynamic_data( - self.default_variant, task_name, asset_doc, - project_name, host_name, instance_data + project_name, + asset_doc, + task_name, + self.default_variant, + host_name, + instance_data )) instance = CreatedInstance( - self.family, subset_name, instance_data, self + self.product_type, product_name, instance_data, self ) instance.transient_data["node"] = root_node self._add_instance_to_context(instance) diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_backdrop.py b/client/ayon_core/hosts/nuke/plugins/load/load_backdrop.py index ed512c86ab..642e20c979 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_backdrop.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_backdrop.py @@ -178,7 +178,7 @@ class LoadBackdropNodes(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -189,13 +189,14 @@ class LoadBackdropNodes(load.LoaderPlugin): # get main variables # Get version from io - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + project_name = context["project"]["name"] + version_doc = context["version"] + repre_doc = context["representation"] # get corresponding node GN = container["node"] - file = 
get_representation_path(representation).replace("\\", "/") + file = get_representation_path(repre_doc).replace("\\", "/") name = container['name'] version_data = version_doc.get("data", {}) @@ -207,7 +208,7 @@ class LoadBackdropNodes(load.LoaderPlugin): add_keys = ["source", "author", "fps"] data_imprint = { - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), "version": vname, "colorspaceInput": colorspace, } @@ -248,8 +249,8 @@ class LoadBackdropNodes(load.LoaderPlugin): return update_container(GN, data_imprint) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = container["node"] diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_camera_abc.py b/client/ayon_core/hosts/nuke/plugins/load/load_camera_abc.py index 2b2fb6f938..e3511a4e8b 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_camera_abc.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_camera_abc.py @@ -92,7 +92,7 @@ class AlembicCameraLoader(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def update(self, container, representation): + def update(self, container, context): """ Called by Scene Inventory when look should be updated to current version. @@ -109,10 +109,8 @@ class AlembicCameraLoader(load.LoaderPlugin): None """ # Get version from io - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) - - object_name = container["node"] + version_doc = context["version"] + repre_doc = context["representation"] # get main variables version_data = version_doc.get("data", {}) @@ -126,7 +124,7 @@ class AlembicCameraLoader(load.LoaderPlugin): add_keys = ["source", "author", "fps"] data_imprint = { - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), "frameStart": first, "frameEnd": last, "version": vname @@ -136,10 +134,10 @@ class AlembicCameraLoader(load.LoaderPlugin): data_imprint.update({k: version_data[k]}) # getting file path - file = get_representation_path(representation).replace("\\", "/") + file = get_representation_path(repre_doc).replace("\\", "/") with maintained_selection(): - camera_node = nuke.toNode(object_name) + camera_node = container["node"] camera_node['selected'].setValue(True) # collect input output dependencies @@ -154,9 +152,10 @@ class AlembicCameraLoader(load.LoaderPlugin): xpos = camera_node.xpos() ypos = camera_node.ypos() nuke.nodeCopy("%clipboard%") + camera_name = camera_node.name() nuke.delete(camera_node) nuke.nodePaste("%clipboard%") - camera_node = nuke.toNode(object_name) + camera_node = nuke.toNode(camera_name) camera_node.setXYpos(xpos, ypos) # link to original input nodes @@ -192,8 +191,8 @@ class AlembicCameraLoader(load.LoaderPlugin): color_value = "0xd88467ff" node["tile_color"].setValue(int(color_value, 16)) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = container["node"] diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_clip.py b/client/ayon_core/hosts/nuke/plugins/load/load_clip.py index 8bce2eac6e..e9e71baa76 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_clip.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_clip.py @@ -53,7 +53,7 @@ class LoadClip(plugin.NukeLoader): color = "white" # 
Loaded from settings - _representations = [] + representations_include = [] script_start = int(nuke.root()["first_frame"].value()) @@ -82,7 +82,7 @@ class LoadClip(plugin.NukeLoader): @classmethod def get_representations(cls): - return cls._representations or cls.representations + return cls.representations_include or cls.representations def load(self, context, name, namespace, options): """Load asset via database @@ -209,8 +209,8 @@ class LoadClip(plugin.NukeLoader): return container - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def _representation_with_hash_in_frame(self, representation): """Convert frame key value to padded hash @@ -241,7 +241,7 @@ class LoadClip(plugin.NukeLoader): representation["context"]["frame"] = hashed_frame return representation - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -250,16 +250,18 @@ class LoadClip(plugin.NukeLoader): """ - is_sequence = len(representation["files"]) > 1 + repre_doc = context["representation"] + + is_sequence = len(repre_doc["files"]) > 1 read_node = container["node"] if is_sequence: - representation = self._representation_with_hash_in_frame( - representation + repre_doc = self._representation_with_hash_in_frame( + repre_doc ) - filepath = get_representation_path(representation).replace("\\", "/") + filepath = get_representation_path(repre_doc).replace("\\", "/") self.log.debug("_ filepath: {}".format(filepath)) start_at_workfile = "start at" in read_node['frame_mode'].value() @@ -270,13 +272,13 @@ class LoadClip(plugin.NukeLoader): ] project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + version_doc = get_version_by_id(project_name, repre_doc["parent"]) version_data = version_doc.get("data", {}) - repre_id = representation["_id"] + repre_id = repre_doc["_id"] # colorspace profile - colorspace = representation["data"].get("colorspace") + colorspace = repre_doc["data"].get("colorspace") colorspace = colorspace or version_data.get("colorspace") self.handle_start = version_data.get("handleStart", 0) @@ -303,12 +305,12 @@ class LoadClip(plugin.NukeLoader): # we will switch off undo-ing with viewer_update_and_undo_stop(): used_colorspace = self._set_colorspace( - read_node, version_data, representation["data"], filepath) + read_node, version_data, repre_doc["data"], filepath) self._set_range_to_node(read_node, first, last, start_at_workfile) updated_dict = { - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), "frameStart": str(first), "frameEnd": str(last), "version": str(version_doc.get("name")), @@ -457,7 +459,7 @@ class LoadClip(plugin.NukeLoader): colorspace = repre_data.get("colorspace") colorspace = colorspace or version_data.get("colorspace") - # colorspace from `project_settings/nuke/imageio/regexInputs` + # colorspace from `project_settings/nuke/imageio/regex_inputs` iio_colorspace = get_imageio_input_colorspace(path) # Set colorspace defined in version data diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_effects.py b/client/ayon_core/hosts/nuke/plugins/load/load_effects.py index 0b5f31033e..3e87c9cf60 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_effects.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_effects.py @@ -146,7 +146,7 @@ class 
LoadEffects(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -156,13 +156,14 @@ class LoadEffects(load.LoaderPlugin): """ # get main variables # Get version from io - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + project_name = context["project"]["name"] + version_doc = context["version"] + repre_doc = context["representation"] # get corresponding node GN = container["node"] - file = get_representation_path(representation).replace("\\", "/") + file = get_representation_path(repre_doc).replace("\\", "/") name = container['name'] version_data = version_doc.get("data", {}) vname = version_doc.get("name", None) @@ -177,7 +178,7 @@ class LoadEffects(load.LoaderPlugin): "source", "author", "fps"] data_imprint = { - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), "frameStart": first, "frameEnd": last, "version": vname, @@ -344,8 +345,8 @@ class LoadEffects(load.LoaderPlugin): else: return input - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = container["node"] diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_effects_ip.py b/client/ayon_core/hosts/nuke/plugins/load/load_effects_ip.py index 4d8a8518f2..5c363cddc4 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_effects_ip.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_effects_ip.py @@ -150,7 +150,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -161,13 +161,14 @@ class LoadEffectsInputProcess(load.LoaderPlugin): # get main variables # Get version from io - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + project_name = context["project"]["name"] + version_doc = context["version"] + repre_doc = context["representation"] # get corresponding node GN = container["node"] - file = get_representation_path(representation).replace("\\", "/") + file = get_representation_path(repre_doc).replace("\\", "/") version_data = version_doc.get("data", {}) vname = version_doc.get("name", None) first = version_data.get("frameStart", None) @@ -179,7 +180,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin): "source", "author", "fps"] data_imprint = { - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), "frameStart": first, "frameEnd": last, "version": vname, @@ -355,8 +356,8 @@ class LoadEffectsInputProcess(load.LoaderPlugin): else: return input - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = container["node"] diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_gizmo.py b/client/ayon_core/hosts/nuke/plugins/load/load_gizmo.py index 54daa74405..058228a145 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_gizmo.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_gizmo.py @@ -97,7 
+97,7 @@ class LoadGizmo(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -108,13 +108,14 @@ class LoadGizmo(load.LoaderPlugin): # get main variables # Get version from io - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + project_name = context["project"]["name"] + version_doc = context["version"] + repre_doc = context["representation"] # get corresponding node group_node = container["node"] - file = get_representation_path(representation).replace("\\", "/") + file = get_representation_path(repre_doc).replace("\\", "/") name = container['name'] version_data = version_doc.get("data", {}) vname = version_doc.get("name", None) @@ -128,7 +129,7 @@ class LoadGizmo(load.LoaderPlugin): "source", "author", "fps"] data_imprint = { - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), "frameStart": first, "frameEnd": last, "version": vname, @@ -173,8 +174,8 @@ class LoadGizmo(load.LoaderPlugin): return update_container(new_group_node, data_imprint) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = container["node"] diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_gizmo_ip.py b/client/ayon_core/hosts/nuke/plugins/load/load_gizmo_ip.py index 677d9868f1..61e1c34028 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_gizmo_ip.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_gizmo_ip.py @@ -104,7 +104,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -115,13 +115,14 @@ class LoadGizmoInputProcess(load.LoaderPlugin): # get main variables # Get version from io - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + project_name = context["project"]["name"] + version_doc = context["version"] + repre_doc = context["representation"] # get corresponding node group_node = container["node"] - file = get_representation_path(representation).replace("\\", "/") + file = get_representation_path(repre_doc).replace("\\", "/") name = container['name'] version_data = version_doc.get("data", {}) vname = version_doc.get("name", None) @@ -135,7 +136,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin): "source", "author", "fps"] data_imprint = { - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), "frameStart": first, "frameEnd": last, "version": vname, @@ -254,8 +255,8 @@ class LoadGizmoInputProcess(load.LoaderPlugin): else: return input - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = container["node"] diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_image.py b/client/ayon_core/hosts/nuke/plugins/load/load_image.py index b9f47bddc9..4f7a5ccc27 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_image.py +++ 
b/client/ayon_core/hosts/nuke/plugins/load/load_image.py @@ -47,7 +47,7 @@ class LoadImage(load.LoaderPlugin): color = "white" # Loaded from settings - _representations = [] + representations_include = [] node_name_template = "{class_name}_{ext}" @@ -64,7 +64,7 @@ class LoadImage(load.LoaderPlugin): @classmethod def get_representations(cls): - return cls._representations or cls.representations + return cls.representations_include or cls.representations def load(self, context, name, namespace, options): self.log.info("__ options: `{}`".format(options)) @@ -155,10 +155,10 @@ class LoadImage(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -171,12 +171,16 @@ class LoadImage(load.LoaderPlugin): assert node.Class() == "Read", "Must be Read" - repr_cont = representation["context"] + project_name = context["project"]["name"] + version_doc = context["version"] + repre_doc = context["representation"] - file = get_representation_path(representation) + repr_cont = repre_doc["context"] + + file = get_representation_path(repre_doc) if not file: - repr_id = representation["_id"] + repr_id = repre_doc["_id"] self.log.warning( "Representation id `{}` is failing to load".format(repr_id)) return @@ -191,8 +195,6 @@ class LoadImage(load.LoaderPlugin): format(frame_number, "0{}".format(padding))) # Get start frame from version data - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) last_version_doc = get_last_version_by_subset_id( project_name, version_doc["parent"], fields=["_id"] ) @@ -210,7 +212,7 @@ class LoadImage(load.LoaderPlugin): updated_dict = {} updated_dict.update({ - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), "frameStart": str(first), "frameEnd": str(last), "version": str(version_doc.get("name")), diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_model.py b/client/ayon_core/hosts/nuke/plugins/load/load_model.py index 125cb28e27..cd4b72df91 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_model.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_model.py @@ -96,7 +96,7 @@ class AlembicModelLoader(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def update(self, container, representation): + def update(self, container, context): """ Called by Scene Inventory when look should be updated to current version. 
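
The loader hunks above and below this point all apply the same signature migration: `update(self, container, representation)` becomes `update(self, container, context)`, and the project, version and representation documents are read from the `context` dict instead of being re-queried with `get_current_project_name()` and `get_version_by_id()`. The following is a minimal sketch of that access pattern, not the actual plugin code: only the three `context` keys ("project", "version", "representation") and the `switch` delegating to `update` are taken from the diff; the standalone function form, the simplified node body, and the `path` stand-in for `get_representation_path()` are illustrative assumptions.

```python
# Sketch of the migrated loader update()/switch() pattern.
# Assumes a pre-resolved `context` dict shaped like the one the
# diff reads from; the container/node handling is simplified.

def update(container, context):
    # The three keys below are the ones the migrated plugins read.
    project_name = context["project"]["name"]
    version_doc = context["version"]
    repre_doc = context["representation"]

    # Illustrative stand-in for get_representation_path(repre_doc).
    filepath = repre_doc["path"].replace("\\", "/")

    # Metadata imprinted back onto the container now comes from the
    # representation/version documents carried in `context`.
    data_imprint = {
        "representation": str(repre_doc["_id"]),
        "version": version_doc.get("name"),
        "project": project_name,
    }
    container["node"] = {"file": filepath, **data_imprint}
    return container


def switch(container, context):
    # As in the plugins above, switch simply delegates to update.
    return update(container, context)


if __name__ == "__main__":
    demo_context = {
        "project": {"name": "demo_project"},
        "version": {"_id": "v001", "name": 3, "data": {}},
        "representation": {"_id": "r001", "path": "C:\\renders\\beauty.exr"},
    }
    print(update({"node": None}, demo_context))
```

Presumably the point of passing the resolved `context` is to avoid each loader re-querying the same project, version and representation documents the pipeline has already fetched; that reasoning is inferred from the removed `get_current_project_name()` / `get_version_by_id()` calls rather than stated in the patch.
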
@@ -106,15 +106,15 @@ class AlembicModelLoader(load.LoaderPlugin): Args: container: object that has look to be updated - representation: (dict): relationship data to get proper + context: (dict): relationship data to get proper representation from DB and persisted data in .json Returns: None """ # Get version from io - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + version_doc = context["version"] + repre_doc = context["representation"] # get corresponding node model_node = container["node"] @@ -131,7 +131,7 @@ class AlembicModelLoader(load.LoaderPlugin): add_keys = ["source", "author", "fps"] data_imprint = { - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), "frameStart": first, "frameEnd": last, "version": vname @@ -141,7 +141,7 @@ class AlembicModelLoader(load.LoaderPlugin): data_imprint.update({k: version_data[k]}) # getting file path - file = get_representation_path(representation).replace("\\", "/") + file = get_representation_path(repre_doc).replace("\\", "/") with maintained_selection(): model_node['selected'].setValue(True) @@ -202,8 +202,8 @@ class AlembicModelLoader(load.LoaderPlugin): color_value = "0xd88467ff" node["tile_color"].setValue(int(color_value, 16)) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = nuke.toNode(container['objectName']) diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_ociolook.py b/client/ayon_core/hosts/nuke/plugins/load/load_ociolook.py index e168c2bac1..e2e7cd3262 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_ociolook.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_ociolook.py @@ -219,14 +219,13 @@ class LoadOcioLookNodes(load.LoaderPlugin): return group_node - def update(self, container, representation): - - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + def update(self, container, context): + version_doc = context["version"] + repre_doc = context["representation"] group_node = container["node"] - filepath = get_representation_path(representation) + filepath = get_representation_path(repre_doc) json_f = self._load_json_data(filepath) @@ -242,7 +241,7 @@ class LoadOcioLookNodes(load.LoaderPlugin): group_node["name"].value())) return update_container( - group_node, {"representation": str(representation["_id"])}) + group_node, {"representation": str(repre_doc["_id"])}) def _load_json_data(self, filepath): # getting data from json file with unicode conversion @@ -280,8 +279,8 @@ class LoadOcioLookNodes(load.LoaderPlugin): else: return input - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = nuke.toNode(container['objectName']) diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_script_precomp.py b/client/ayon_core/hosts/nuke/plugins/load/load_script_precomp.py index 1c91e51a09..5d62a7ca0f 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_script_precomp.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_script_precomp.py @@ -104,10 +104,10 @@ class LinkAsGroup(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, 
container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -117,11 +117,13 @@ class LinkAsGroup(load.LoaderPlugin): """ node = container["node"] - root = get_representation_path(representation).replace("\\", "/") + project_name = context["project"]["name"] + version_doc = context["version"] + repre_doc = context["representation"] + + root = get_representation_path(repre_doc).replace("\\", "/") # Get start frame from version data - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) last_version_doc = get_last_version_by_subset_id( project_name, version_doc["parent"], fields=["_id"] ) @@ -129,7 +131,7 @@ class LinkAsGroup(load.LoaderPlugin): updated_dict = {} version_data = version_doc["data"] updated_dict.update({ - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), "frameEnd": version_data.get("frameEnd"), "version": version_doc.get("name"), "colorspace": version_data.get("colorspace"), diff --git a/client/ayon_core/hosts/nuke/plugins/publish/collect_gizmo.py b/client/ayon_core/hosts/nuke/plugins/publish/collect_gizmo.py index c410de7c32..fda1c7ac31 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/collect_gizmo.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/collect_gizmo.py @@ -15,8 +15,8 @@ class CollectGizmo(pyblish.api.InstancePlugin): gizmo_node = instance.data["transientData"]["node"] - # add family to familiess - instance.data["families"].insert(0, instance.data["family"]) + # add product type to familiess + instance.data["families"].insert(0, instance.data["productType"]) # make label nicer instance.data["label"] = gizmo_node.name() @@ -25,6 +25,7 @@ class CollectGizmo(pyblish.api.InstancePlugin): handle_end = instance.context.data["handleEnd"] first_frame = int(nuke.root()["first_frame"].getValue()) last_frame = int(nuke.root()["last_frame"].getValue()) + families = [instance.data["productType"]] + instance.data["families"] # Add version data to instance version_data = { @@ -33,8 +34,8 @@ class CollectGizmo(pyblish.api.InstancePlugin): "frameStart": first_frame + handle_start, "frameEnd": last_frame - handle_end, "colorspace": nuke.root().knob('workingSpaceLUT').value(), - "families": [instance.data["family"]] + instance.data["families"], - "subset": instance.data["subset"], + "families": families, + "productName": instance.data["productName"], "fps": instance.context.data["fps"] } diff --git a/client/ayon_core/hosts/nuke/plugins/publish/collect_model.py b/client/ayon_core/hosts/nuke/plugins/publish/collect_model.py index a099f06be0..1a2bc9c019 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/collect_model.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/collect_model.py @@ -15,8 +15,8 @@ class CollectModel(pyblish.api.InstancePlugin): geo_node = instance.data["transientData"]["node"] - # add family to familiess - instance.data["families"].insert(0, instance.data["family"]) + # add product type to familiess + instance.data["families"].insert(0, instance.data["productType"]) # make label nicer instance.data["label"] = geo_node.name() @@ -25,7 +25,7 @@ class CollectModel(pyblish.api.InstancePlugin): handle_end = instance.context.data["handleEnd"] first_frame = int(nuke.root()["first_frame"].getValue()) last_frame = int(nuke.root()["last_frame"].getValue()) - + families = 
[instance.data["productType"]] + instance.data["families"] # Add version data to instance version_data = { "handleStart": handle_start, @@ -33,8 +33,8 @@ class CollectModel(pyblish.api.InstancePlugin): "frameStart": first_frame + handle_start, "frameEnd": last_frame - handle_end, "colorspace": nuke.root().knob('workingSpaceLUT').value(), - "families": [instance.data["family"]] + instance.data["families"], - "subset": instance.data["subset"], + "families": families, + "productName": instance.data["productName"], "fps": instance.context.data["fps"] } diff --git a/client/ayon_core/hosts/nuke/plugins/publish/collect_nuke_instance_data.py b/client/ayon_core/hosts/nuke/plugins/publish/collect_nuke_instance_data.py index 449a1cc935..951072ff3f 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/collect_nuke_instance_data.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/collect_nuke_instance_data.py @@ -15,7 +15,7 @@ class CollectInstanceData(pyblish.api.InstancePlugin): sync_workfile_version_on_families = [] def process(self, instance): - family = instance.data["family"] + product_type = instance.data["productType"] # Get format root = nuke.root() @@ -25,10 +25,10 @@ class CollectInstanceData(pyblish.api.InstancePlugin): pixel_aspect = format_.pixelAspect() # sync workfile version - if family in self.sync_workfile_version_on_families: + if product_type in self.sync_workfile_version_on_families: self.log.debug( "Syncing version with workfile for '{}'".format( - family + product_type ) ) # get version to instance for integration diff --git a/client/ayon_core/hosts/nuke/plugins/publish/collect_reads.py b/client/ayon_core/hosts/nuke/plugins/publish/collect_reads.py index 38938a3dda..af17933eb1 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/collect_reads.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/collect_reads.py @@ -99,8 +99,8 @@ class CollectNukeReads(pyblish.api.InstancePlugin): "frameStart": first_frame + handle_start, "frameEnd": last_frame - handle_end, "colorspace": colorspace, - "families": [instance.data["family"]], - "subset": instance.data["subset"], + "families": [instance.data["productType"]], + "productName": instance.data["productName"], "fps": instance.context.data["fps"] } diff --git a/client/ayon_core/hosts/nuke/plugins/publish/collect_writes.py b/client/ayon_core/hosts/nuke/plugins/publish/collect_writes.py index 84dc7992a5..58afb2cd1f 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/collect_writes.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/collect_writes.py @@ -138,14 +138,14 @@ class CollectNukeWrites(pyblish.api.InstancePlugin, render_target (str): render target colorspace (str): colorspace """ - family = instance.data["family"] + product_type = instance.data["productType"] # add targeted family to families instance.data["families"].append( - "{}.{}".format(family, render_target) + "{}.{}".format(product_type, render_target) ) self.log.debug("Appending render target to families: {}.{}".format( - family, render_target) + product_type, render_target) ) write_node = self._write_node_helper(instance) @@ -175,7 +175,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin, "colorspace": colorspace }) - if family == "render": + if product_type == "render": instance.data.update({ "handleStart": handle_start, "handleEnd": handle_end, diff --git a/client/ayon_core/hosts/nuke/plugins/publish/extract_camera.py b/client/ayon_core/hosts/nuke/plugins/publish/extract_camera.py index 810a2e0a76..1f5a8c73e1 100644 --- 
a/client/ayon_core/hosts/nuke/plugins/publish/extract_camera.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/extract_camera.py @@ -38,7 +38,7 @@ class ExtractCamera(publish.Extractor): rm_nodes = [] self.log.debug("Creating additional nodes for 3D Camera Extractor") - subset = instance.data["subset"] + product_name = instance.data["productName"] staging_dir = self.staging_dir(instance) # get extension form preset @@ -50,7 +50,7 @@ class ExtractCamera(publish.Extractor): "Talk to your supervisor or pipeline admin") # create file name and path - filename = subset + ".{}".format(extension) + filename = product_name + ".{}".format(extension) file_path = os.path.join(staging_dir, filename).replace("\\", "/") with maintained_selection(): diff --git a/client/ayon_core/hosts/nuke/plugins/publish/extract_model.py b/client/ayon_core/hosts/nuke/plugins/publish/extract_model.py index 6f35e95630..36896fe595 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/extract_model.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/extract_model.py @@ -40,7 +40,7 @@ class ExtractModel(publish.Extractor): model_node = instance.data["transientData"]["node"] self.log.debug("Creating additional nodes for Extract Model") - subset = instance.data["subset"] + product_name = instance.data["productName"] staging_dir = self.staging_dir(instance) extension = next((k[1] for k in self.write_geo_knobs @@ -51,7 +51,7 @@ class ExtractModel(publish.Extractor): "Talk to your supervisor or pipeline admin") # create file name and path - filename = subset + ".{}".format(extension) + filename = product_name + ".{}".format(extension) file_path = os.path.join(staging_dir, filename).replace("\\", "/") with maintained_selection(): diff --git a/client/ayon_core/hosts/nuke/plugins/publish/extract_render_local.py b/client/ayon_core/hosts/nuke/plugins/publish/extract_render_local.py index 45514ede5e..c8be2a5564 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/extract_render_local.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/extract_render_local.py @@ -38,7 +38,7 @@ class NukeRenderLocal(publish.Extractor, self.log.debug("instance collected: {}".format(instance.data)) - node_subset_name = instance.data.get("name", None) + node_product_name = instance.data.get("name", None) first_frame = instance.data.get("frameStartHandle", None) last_frame = instance.data.get("frameEndHandle", None) @@ -80,7 +80,7 @@ class NukeRenderLocal(publish.Extractor, # Render frames nuke.execute( - str(node_subset_name), + str(node_product_name), int(render_first_frame), int(render_last_frame) ) @@ -125,21 +125,28 @@ class NukeRenderLocal(publish.Extractor, )) families = instance.data["families"] + anatomy_data = instance.data["anatomyData"] # redefinition of families if "render.local" in families: - instance.data['family'] = 'render' - families.remove('render.local') + instance.data["family"] = "render" + instance.data["productType"] = "render" + families.remove("render.local") families.insert(0, "render2d") - instance.data["anatomyData"]["family"] = "render" + anatomy_data["family"] = "render" + anatomy_data["product"]["type"] = "render" elif "prerender.local" in families: - instance.data['family'] = 'prerender' - families.remove('prerender.local') + instance.data["family"] = "prerender" + instance.data["productType"] = "prerender" + families.remove("prerender.local") families.insert(0, "prerender") - instance.data["anatomyData"]["family"] = "prerender" + anatomy_data["family"] = "prerender" + anatomy_data["product"]["type"] = "prerender" 
elif "image.local" in families: - instance.data['family'] = 'image' - families.remove('image.local') - instance.data["anatomyData"]["family"] = "image" + instance.data["family"] = "image" + instance.data["productType"] = "image" + families.remove("image.local") + anatomy_data["family"] = "image" + anatomy_data["product"]["type"] = "image" instance.data["families"] = families collections, remainder = clique.assemble(filenames) @@ -160,7 +167,7 @@ class NukeRenderLocal(publish.Extractor, These are base of files which will be extended/fixed for specific frames. Renames published file to expected file name based on frame, eg. - test_project_test_asset_subset_v005.1001.exr > new_render.1001.exr + test_project_test_asset_product_v005.1001.exr > new_render.1001.exr """ last_published = instance.data["last_version_published_files"] last_published_and_frames = collect_frames(last_published) diff --git a/client/ayon_core/hosts/nuke/plugins/publish/extract_review_intermediates.py b/client/ayon_core/hosts/nuke/plugins/publish/extract_review_intermediates.py index a00c1c593f..8ac07c641c 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/extract_review_intermediates.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/extract_review_intermediates.py @@ -50,13 +50,14 @@ class ExtractReviewIntermediates(publish.Extractor): cls.outputs = current_setting["outputs"] def process(self, instance): + # TODO 'families' should not be included for filtering of outputs families = set(instance.data["families"]) - # add main family to make sure all families are compared - families.add(instance.data["family"]) + # Add product type to families + families.add(instance.data["productType"]) task_type = instance.context.data["taskType"] - subset = instance.data["subset"] + product_name = instance.data["productName"] self.log.debug("Creating staging dir...") if "representations" not in instance.data: @@ -75,29 +76,33 @@ class ExtractReviewIntermediates(publish.Extractor): # generate data with maintained_selection(): generated_repres = [] - for o_name, o_data in self.outputs.items(): + for o_data in self.outputs: + o_name = o_data["name"] self.log.debug( "o_name: {}, o_data: {}".format(o_name, pformat(o_data))) - f_families = o_data["filter"]["families"] + f_product_types = o_data["filter"]["product_types"] f_task_types = o_data["filter"]["task_types"] - f_subsets = o_data["filter"]["subsets"] + product_names = o_data["filter"]["product_names"] self.log.debug( - "f_families `{}` > families: {}".format( - f_families, families)) + "f_product_types `{}` > families: {}".format( + f_product_types, families)) self.log.debug( "f_task_types `{}` > task_type: {}".format( f_task_types, task_type)) self.log.debug( - "f_subsets `{}` > subset: {}".format( - f_subsets, subset)) + "product_names `{}` > product: {}".format( + product_names, product_name)) # test if family found in context # using intersection to make sure all defined # families are present in combination - if f_families and not families.intersection(f_families): + if ( + f_product_types + and not families.intersection(f_product_types) + ): continue # test task types from filter @@ -105,8 +110,9 @@ class ExtractReviewIntermediates(publish.Extractor): continue # test subsets from filter - if f_subsets and not any( - re.search(s, subset) for s in f_subsets): + if product_names and not any( + re.search(p, product_name) for p in product_names + ): continue self.log.debug( @@ -117,7 +123,7 @@ class ExtractReviewIntermediates(publish.Extractor): # check if settings have more 
then one preset # so we dont need to add outputName to representation # in case there is only one preset - multiple_presets = len(self.outputs.keys()) > 1 + multiple_presets = len(self.outputs) > 1 # adding bake presets to instance data for other plugins if not instance.data.get("bakePresets"): diff --git a/client/ayon_core/hosts/nuke/plugins/publish/extract_slate_frame.py b/client/ayon_core/hosts/nuke/plugins/publish/extract_slate_frame.py index 0c4823b1aa..c013da84d2 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -29,9 +29,15 @@ class ExtractSlateFrame(publish.Extractor): # Settings values key_value_mapping = { - "f_submission_note": [True, "{comment}"], - "f_submitting_for": [True, "{intent[value]}"], - "f_vfx_scope_of_work": [False, ""] + "f_submission_note": { + "enabled": True, "template": "{comment}" + }, + "f_submitting_for": { + "enabled": True, "template": "{intent[value]}" + }, + "f_vfx_scope_of_work": { + "enabled": False, "template": "" + } } def process(self, instance): @@ -109,7 +115,7 @@ class ExtractSlateFrame(publish.Extractor): """Slate frame renderer Args: - instance (PyblishInstance): Pyblish instance with subset data + instance (PyblishInstance): Pyblish instance with product data output_name (str, optional): Slate variation name. Defaults to None. bake_viewer_process (bool, optional): @@ -316,11 +322,11 @@ class ExtractSlateFrame(publish.Extractor): }) for key, _values in self.key_value_mapping.items(): - enabled, template = _values - if not enabled: + if not _values["enabled"]: self.log.debug("Key \"{}\" is disabled".format(key)) continue + template = _values["template"] try: value = template.format(**fill_data) diff --git a/client/ayon_core/hosts/nuke/plugins/publish/help/validate_write_nodes.xml b/client/ayon_core/hosts/nuke/plugins/publish/help/validate_write_nodes.xml index 1717622a45..96aa6e4494 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/help/validate_write_nodes.xml +++ b/client/ayon_core/hosts/nuke/plugins/publish/help/validate_write_nodes.xml @@ -25,7 +25,7 @@ ### How to repair? Contact your supervisor or fix it in project settings at - 'project_settings/nuke/imageio/nodes/requiredNodes' at knobs. + 'project_settings/nuke/imageio/nodes/required_nodes' at knobs. Each '__legacy__' type has to be defined accordingly to its type. diff --git a/client/ayon_core/hosts/nuke/plugins/publish/validate_asset_context.py b/client/ayon_core/hosts/nuke/plugins/publish/validate_asset_context.py index b4814c6a00..52ef4a58d4 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/validate_asset_context.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/validate_asset_context.py @@ -23,10 +23,10 @@ class ValidateCorrectAssetContext( current asset (shot). This validator checks if this is so. It is optional so it can be disabled when needed. - Checking `asset` and `task` keys. + Checking `folderPath` and `task` keys. 
""" order = ValidateContentsOrder - label = "Validate asset context" + label = "Validate Folder context" hosts = ["nuke"] actions = [ RepairAction, @@ -85,7 +85,7 @@ class ValidateCorrectAssetContext( """Get invalid keys from instance data and context data.""" invalid_keys = [] - testing_keys = ["asset", "task"] + testing_keys = ["folderPath", "task"] for _key in testing_keys: if _key not in instance.data: invalid_keys.append(_key) diff --git a/client/ayon_core/hosts/nuke/plugins/publish/validate_exposed_knobs.py b/client/ayon_core/hosts/nuke/plugins/publish/validate_exposed_knobs.py index 9111bcdc2c..217fe6fb85 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/validate_exposed_knobs.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/validate_exposed_knobs.py @@ -29,7 +29,8 @@ class RepairExposedKnobs(pyblish.api.Action): if x.Class() == "Write": write_node = x - plugin_name = plugin.families_mapping[instance.data["family"]] + product_type = instance.data["productType"] + plugin_name = plugin.product_types_mapping[product_type] nuke_settings = instance.context.data["project_settings"]["nuke"] create_settings = nuke_settings["create"][plugin_name] exposed_knobs = create_settings["exposed_knobs"] @@ -51,7 +52,7 @@ class ValidateExposedKnobs( label = "Validate Exposed Knobs" actions = [RepairExposedKnobs] hosts = ["nuke"] - families_mapping = { + product_types_mapping = { "render": "CreateWriteRender", "prerender": "CreateWritePrerender", "image": "CreateWriteImage" @@ -61,11 +62,12 @@ class ValidateExposedKnobs( if not self.is_active(instance.data): return - plugin = self.families_mapping[instance.data["family"]] + product_type = instance.data["productType"] + plugin = self.product_types_mapping[product_type] group_node = instance.data["transientData"]["node"] nuke_settings = instance.context.data["project_settings"]["nuke"] create_settings = nuke_settings["create"][plugin] - exposed_knobs = create_settings["exposed_knobs"] + exposed_knobs = create_settings.get("exposed_knobs", []) unexposed_knobs = [] for knob in exposed_knobs: if knob not in group_node.knobs(): diff --git a/client/ayon_core/hosts/nuke/plugins/publish/validate_knobs.py b/client/ayon_core/hosts/nuke/plugins/publish/validate_knobs.py index 84efebab53..281e172788 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/validate_knobs.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/validate_knobs.py @@ -30,6 +30,8 @@ class ValidateKnobs(pyblish.api.ContextPlugin): actions = [RepairContextAction] optional = True + knobs = "{}" + def process(self, context): invalid = self.get_invalid(context, compute=True) if invalid: @@ -61,9 +63,11 @@ class ValidateKnobs(pyblish.api.ContextPlugin): invalid_knobs = [] for instance in context: + # Load fresh knobs data for each instance + settings_knobs = json.loads(cls.knobs) # Filter families. - families = [instance.data["family"]] + families = [instance.data["productType"]] families += instance.data.get("families", []) # Get all knobs to validate. @@ -74,12 +78,12 @@ class ValidateKnobs(pyblish.api.ContextPlugin): family = family.split(".")[0] # avoid families not in settings - if family not in cls.knobs: + if family not in settings_knobs: continue # get presets of knobs - for preset in cls.knobs[family]: - knobs[preset] = cls.knobs[family][preset] + for preset in settings_knobs[family]: + knobs[preset] = settings_knobs[family][preset] # Get invalid knobs. 
nodes = [] diff --git a/client/ayon_core/hosts/nuke/plugins/publish/validate_write_nodes.py b/client/ayon_core/hosts/nuke/plugins/publish/validate_write_nodes.py index 4274d68826..0244c1d504 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -86,7 +86,10 @@ class ValidateNukeWriteNode( # Collect key values of same type in a list. values_by_name = defaultdict(list) for knob_data in correct_data["knobs"]: - values_by_name[knob_data["name"]].append(knob_data["value"]) + knob_type = knob_data["type"] + knob_value = knob_data[knob_type] + + values_by_name[knob_data["name"]].append(knob_value) for knob_data in correct_data["knobs"]: knob_type = knob_data["type"] @@ -97,7 +100,7 @@ class ValidateNukeWriteNode( raise PublishXmlValidationError( self, ( "Please update data in settings 'project_settings" - "/nuke/imageio/nodes/requiredNodes'" + "/nuke/imageio/nodes/required_nodes'" ), key="legacy" ) @@ -129,7 +132,7 @@ class ValidateNukeWriteNode( and key != "file" and key != "tile_color" ): - check.append([key, node_value, write_node[key].value()]) + check.append([key, fixed_values, write_node[key].value()]) if check: self._make_error(check) @@ -137,7 +140,7 @@ class ValidateNukeWriteNode( def _make_error(self, check): # sourcery skip: merge-assign-and-aug-assign, move-assign-in-block dbg_msg = "Write node's knobs values are not correct!\n" - msg_add = "Knob '{0}' > Correct: `{1}` > Wrong: `{2}`" + msg_add = "Knob '{0}' > Expected: `{1}` > Current: `{2}`" details = [ msg_add.format(item[0], item[1], item[2]) diff --git a/client/ayon_core/hosts/nuke/startup/custom_write_node.py b/client/ayon_core/hosts/nuke/startup/custom_write_node.py index 01e255d0c0..84e99f34c4 100644 --- a/client/ayon_core/hosts/nuke/startup/custom_write_node.py +++ b/client/ayon_core/hosts/nuke/startup/custom_write_node.py @@ -111,9 +111,14 @@ class WriteNodeKnobSettingPanel(nukescripts.PythonPanel): ) for write_node in write_selected_nodes: # data for mapping the path + # TODO add more fill data + product_name = write_node["name"].value() data = { - "work": os.getenv("AVALON_WORKDIR"), - "subset": write_node["name"].value(), + "work": os.getenv("AYON_WORKDIR"), + "subset": product_name, + "product": { + "name": product_name, + }, "frame": "#" * frame_padding, "ext": ext } @@ -127,8 +132,8 @@ class WriteNodeKnobSettingPanel(nukescripts.PythonPanel): knobs_nodes = [] settings = [ node_settings for node_settings - in get_nuke_imageio_settings()["nodes"]["overrideNodes"] - if node_settings["nukeNodeClass"] == "Write" + in get_nuke_imageio_settings()["nodes"]["override_nodes"] + if node_settings["nuke_node_class"] == "Write" and node_settings["subsets"] ] if not settings: @@ -139,6 +144,7 @@ class WriteNodeKnobSettingPanel(nukescripts.PythonPanel): knobs_nodes = settings[i]["knobs"] for setting in settings: + # TODO change 'subsets' to 'product_names' in settings for subset in setting["subsets"]: preset_name.append(subset) diff --git a/client/ayon_core/hosts/photoshop/addon.py b/client/ayon_core/hosts/photoshop/addon.py index 0c7efdb317..3016912960 100644 --- a/client/ayon_core/hosts/photoshop/addon.py +++ b/client/ayon_core/hosts/photoshop/addon.py @@ -1,16 +1,13 @@ import os -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon PHOTOSHOP_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) -class PhotoshopAddon(OpenPypeModule, IHostAddon): +class 
PhotoshopAddon(AYONAddon, IHostAddon): name = "photoshop" host_name = "photoshop" - def initialize(self, module_settings): - self.enabled = True - def add_implementation_envs(self, env, _app): """Modify environments to contain all required for implementation.""" defaults = { diff --git a/client/ayon_core/hosts/photoshop/api/README.md b/client/ayon_core/hosts/photoshop/api/README.md index 72e2217829..9383bdaade 100644 --- a/client/ayon_core/hosts/photoshop/api/README.md +++ b/client/ayon_core/hosts/photoshop/api/README.md @@ -55,7 +55,7 @@ class CreateImage(photoshop.Creator): name = "imageDefault" label = "Image" - family = "image" + product_type = "image" def __init__(self, *args, **kwargs): super(CreateImage, self).__init__(*args, **kwargs) @@ -75,7 +75,7 @@ class CollectInstances(pyblish.api.ContextPlugin): an LayerSet and marked with a unique identifier; Identifier: - id (str): "pyblish.avalon.instance" + id (str): "ayon.create.instance" """ label = "Instances" @@ -114,7 +114,7 @@ class CollectInstances(pyblish.api.ContextPlugin): instance.append(layer) instance.data.update(layer_data) instance.data["families"] = self.families_mapping[ - layer_data["family"] + layer_data["productType"] ] instance.data["publish"] = layer.visible @@ -224,23 +224,23 @@ class ImageLoader(load.LoaderPlugin): self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): layer = container.pop("layer") - + repre_doc = context["representation"] with photoshop.maintained_selection(): stub.replace_smart_object( - layer, get_representation_path(representation) + layer, get_representation_path(repre_doc) ) stub.imprint( - layer, {"representation": str(representation["_id"])} + layer, {"representation": str(repre_doc["_id"])} ) def remove(self, container): container["layer"].Delete() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) ``` For easier debugging of Javascript: https://community.adobe.com/t5/download-install/adobe-extension-debuger-problem/td-p/10911704?page=1 diff --git a/client/ayon_core/hosts/photoshop/api/lib.py b/client/ayon_core/hosts/photoshop/api/lib.py index 3111503e40..af14e6d02f 100644 --- a/client/ayon_core/hosts/photoshop/api/lib.py +++ b/client/ayon_core/hosts/photoshop/api/lib.py @@ -3,12 +3,11 @@ import sys import contextlib import traceback -from ayon_core.lib import env_value_to_bool, Logger +from ayon_core.lib import env_value_to_bool, Logger, is_in_tests from ayon_core.addon import AddonsManager from ayon_core.pipeline import install_host from ayon_core.tools.utils import host_tools from ayon_core.tools.utils import get_ayon_qt_app -from ayon_core.tests.lib import is_in_tests from .launch_logic import ProcessLauncher, stub diff --git a/client/ayon_core/hosts/photoshop/api/pipeline.py b/client/ayon_core/hosts/photoshop/api/pipeline.py index 046ec8e6ee..4e9a861220 100644 --- a/client/ayon_core/hosts/photoshop/api/pipeline.py +++ b/client/ayon_core/hosts/photoshop/api/pipeline.py @@ -9,6 +9,8 @@ from ayon_core.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, AVALON_CONTAINER_ID, + AYON_INSTANCE_ID, + AVALON_INSTANCE_ID, ) from ayon_core.host import ( @@ -62,7 +64,7 @@ class PhotoshopHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): return None def work_root(self, session): - return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/") + return 
os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/") def open_workfile(self, filepath): lib.stub().open(filepath) @@ -121,7 +123,9 @@ class PhotoshopHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): layers_meta = stub.get_layers_metadata() if layers_meta: for instance in layers_meta: - if instance.get("id") == "pyblish.avalon.instance": + if instance.get("id") in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: instances.append(instance) return instances diff --git a/client/ayon_core/hosts/photoshop/api/plugin.py b/client/ayon_core/hosts/photoshop/api/plugin.py index 22645a1f9b..d4eb38300f 100644 --- a/client/ayon_core/hosts/photoshop/api/plugin.py +++ b/client/ayon_core/hosts/photoshop/api/plugin.py @@ -4,19 +4,21 @@ from ayon_core.pipeline import LoaderPlugin from .launch_logic import stub -def get_unique_layer_name(layers, asset_name, subset_name): - """ - Gets all layer names and if 'asset_name_subset_name' is present, it - increases suffix by 1 (eg. creates unique layer name - for Loader) +def get_unique_layer_name(layers, asset_name, product_name): + """Prepare unique layer name. + + Gets all layer names and if '_' is present, + it adds suffix '1', or increases the suffix by 1. + Args: layers (list) of dict with layers info (name, id etc.) - asset_name (string): - subset_name (string): + asset_name (str): + product_name (str): Returns: - (string): name_00X (without version) + str: name_00X (without version) """ - name = "{}_{}".format(asset_name, subset_name) + name = "{}_{}".format(asset_name, product_name) names = {} for layer in layers: layer_name = re.sub(r'_\d{3}$', '', layer.name) diff --git a/client/ayon_core/hosts/photoshop/api/ws_stub.py b/client/ayon_core/hosts/photoshop/api/ws_stub.py index 42bad05f26..36fe0af2f8 100644 --- a/client/ayon_core/hosts/photoshop/api/ws_stub.py +++ b/client/ayon_core/hosts/photoshop/api/ws_stub.py @@ -117,16 +117,16 @@ class PhotoshopServerStub: Stores metadata in format: [{ "active":true, - "subset":"imageBG", - "family":"image", - "id":"pyblish.avalon.instance", - "asset":"Town", + "productName":"imageBG", + "productType":"image", + "id":"ayon.create.instance", + "folderPath":"Town", "uuid": "8" }] - for created instances OR [{ "schema": "openpype:container-2.0", - "id": "pyblish.avalon.instance", + "id": "ayon.create.instance", "name": "imageMG", "namespace": "Jungle_imageMG_001", "loader": "ImageLoader", @@ -419,9 +419,9 @@ class PhotoshopServerStub: Returns: (list) example: - {"8":{"active":true,"subset":"imageBG", - "family":"image","id":"pyblish.avalon.instance", - "asset":"Town"}} + {"8":{"active":true,"productName":"imageBG", + "productType":"image","id":"ayon.create.instance", + "folderPath":"/Town"}} 8 is layer(group) id - used for deletion, update etc. 
""" res = self.websocketserver.call(self.client.call('Photoshop.read')) diff --git a/client/ayon_core/hosts/photoshop/lib.py b/client/ayon_core/hosts/photoshop/lib.py index b905caf1bd..6d5be48bc2 100644 --- a/client/ayon_core/hosts/photoshop/lib.py +++ b/client/ayon_core/hosts/photoshop/lib.py @@ -34,7 +34,7 @@ class PSAutoCreator(AutoCreator): def create(self, options=None): existing_instance = None for instance in self.create_context.instances: - if instance.family == self.family: + if instance.product_type == self.product_type: existing_instance = instance break @@ -51,9 +51,12 @@ class PSAutoCreator(AutoCreator): if existing_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - self.default_variant, task_name, asset_doc, - project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + self.default_variant, + host_name, ) data = { "folderPath": asset_name, @@ -61,15 +64,19 @@ class PSAutoCreator(AutoCreator): "variant": self.default_variant } data.update(self.get_dynamic_data( - self.default_variant, task_name, asset_doc, - project_name, host_name, None + project_name, + asset_doc, + task_name, + self.default_variant, + host_name, + None )) if not self.active_on_create: data["active"] = False new_instance = CreatedInstance( - self.family, subset_name, data, self + self.product_type, product_name, data, self ) self._add_instance_to_context(new_instance) api.stub().imprint(new_instance.get("instance_id"), @@ -80,24 +87,30 @@ class PSAutoCreator(AutoCreator): or existing_instance["task"] != task_name ): asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - self.default_variant, task_name, asset_doc, - project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + self.default_variant, + host_name, ) existing_instance["folderPath"] = asset_name existing_instance["task"] = task_name - existing_instance["subset"] = subset_name + existing_instance["productName"] = product_name -def clean_subset_name(subset_name): - """Clean all variants leftover {layer} from subset name.""" +def clean_product_name(product_name): + """Clean all variants leftover {layer} from product name.""" dynamic_data = prepare_template_data({"layer": "{layer}"}) for value in dynamic_data.values(): - if value in subset_name: - subset_name = (subset_name.replace(value, "") - .replace("__", "_") - .replace("..", ".")) + if value in product_name: + product_name = ( + product_name + .replace(value, "") + .replace("__", "_") + .replace("..", ".") + ) # clean trailing separator as Main_ pattern = r'[\W_]+$' replacement = '' - return re.sub(pattern, replacement, subset_name) + return re.sub(pattern, replacement, product_name) diff --git a/client/ayon_core/hosts/photoshop/plugins/create/create_flatten_image.py b/client/ayon_core/hosts/photoshop/plugins/create/create_flatten_image.py index 666fd52f78..11bf92d5fb 100644 --- a/client/ayon_core/hosts/photoshop/plugins/create/create_flatten_image.py +++ b/client/ayon_core/hosts/photoshop/plugins/create/create_flatten_image.py @@ -2,8 +2,8 @@ from ayon_core.pipeline import CreatedInstance from ayon_core.lib import BoolDef import ayon_core.hosts.photoshop.api as api -from ayon_core.hosts.photoshop.lib import PSAutoCreator, clean_subset_name -from ayon_core.pipeline.create import get_subset_name +from ayon_core.hosts.photoshop.lib import PSAutoCreator, clean_product_name +from ayon_core.pipeline.create 
import get_product_name from ayon_core.lib import prepare_template_data from ayon_core.client import get_asset_by_name @@ -12,10 +12,10 @@ class AutoImageCreator(PSAutoCreator): """Creates flatten image from all visible layers. Used in simplified publishing as auto created instance. - Must be enabled in Setting and template for subset name provided + Must be enabled in Setting and template for product name provided """ identifier = "auto_image" - family = "image" + product_type = "image" # Settings default_variant = "" @@ -43,9 +43,12 @@ class AutoImageCreator(PSAutoCreator): existing_instance_asset = existing_instance["folderPath"] if existing_instance is None: - subset_name = self.get_subset_name( - self.default_variant, task_name, asset_doc, - project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + self.default_variant, + host_name, ) data = { @@ -60,7 +63,7 @@ class AutoImageCreator(PSAutoCreator): data.update({"creator_attributes": creator_attributes}) new_instance = CreatedInstance( - self.family, subset_name, data, self + self.product_type, product_name, data, self ) self._add_instance_to_context(new_instance) api.stub().imprint(new_instance.get("instance_id"), @@ -70,13 +73,16 @@ class AutoImageCreator(PSAutoCreator): existing_instance_asset != asset_name or existing_instance["task"] != task_name ): - subset_name = self.get_subset_name( - self.default_variant, task_name, asset_doc, - project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + self.default_variant, + host_name, ) existing_instance["folderPath"] = asset_name existing_instance["task"] = task_name - existing_instance["subset"] = subset_name + existing_instance["productName"] = product_name api.stub().imprint(existing_instance.get("instance_id"), existing_instance.data_to_store()) @@ -119,18 +125,25 @@ class AutoImageCreator(PSAutoCreator): review for it though. 
""" - def get_subset_name( - self, - variant, - task_name, - asset_doc, - project_name, - host_name=None, - instance=None + def get_product_name( + self, + project_name, + asset_doc, + task_name, + variant, + host_name=None, + instance=None ): + if host_name is None: + host_name = self.create_context.host_name dynamic_data = prepare_template_data({"layer": "{layer}"}) - subset_name = get_subset_name( - self.family, variant, task_name, asset_doc, - project_name, host_name, dynamic_data=dynamic_data + product_name = get_product_name( + project_name, + asset_doc, + task_name, + host_name, + self.product_type, + variant, + dynamic_data=dynamic_data ) - return clean_subset_name(subset_name) + return clean_product_name(product_name) diff --git a/client/ayon_core/hosts/photoshop/plugins/create/create_image.py b/client/ayon_core/hosts/photoshop/plugins/create/create_image.py index a28872bba1..8806aad33c 100644 --- a/client/ayon_core/hosts/photoshop/plugins/create/create_image.py +++ b/client/ayon_core/hosts/photoshop/plugins/create/create_image.py @@ -8,9 +8,9 @@ from ayon_core.pipeline import ( CreatorError ) from ayon_core.lib import prepare_template_data -from ayon_core.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS +from ayon_core.pipeline.create import PRODUCT_NAME_ALLOWED_SYMBOLS from ayon_core.hosts.photoshop.api.pipeline import cache_and_get_instances -from ayon_core.hosts.photoshop.lib import clean_subset_name +from ayon_core.hosts.photoshop.lib import clean_product_name class ImageCreator(Creator): @@ -21,7 +21,7 @@ class ImageCreator(Creator): """ identifier = "image" label = "Image" - family = "image" + product_type = "image" description = "Image creator" # Settings @@ -29,7 +29,7 @@ class ImageCreator(Creator): mark_for_review = False active_on_create = True - def create(self, subset_name_from_ui, data, pre_create_data): + def create(self, product_name_from_ui, data, pre_create_data): groups_to_create = [] top_layers_to_wrap = [] create_empty_group = False @@ -47,19 +47,19 @@ class ImageCreator(Creator): else: top_layers_to_wrap.append(selected_item) else: - group = stub.group_selected_layers(subset_name_from_ui) + group = stub.group_selected_layers(product_name_from_ui) groups_to_create.append(group) else: stub.select_layers(stub.get_layers()) try: - group = stub.group_selected_layers(subset_name_from_ui) + group = stub.group_selected_layers(product_name_from_ui) except: raise CreatorError("Cannot group locked Background layer!") groups_to_create.append(group) # create empty group if nothing selected if not groups_to_create and not top_layers_to_wrap: - group = stub.create_group(subset_name_from_ui) + group = stub.create_group(product_name_from_ui) groups_to_create.append(group) # wrap each top level layer into separate new group @@ -69,27 +69,27 @@ class ImageCreator(Creator): groups_to_create.append(group) layer_name = '' - # use artist chosen option OR force layer if more subsets are created + # use artist chosen option OR force layer if more products are created # to differentiate them use_layer_name = (pre_create_data.get("use_layer_name") or len(groups_to_create) > 1) for group in groups_to_create: - subset_name = subset_name_from_ui # reset to name from creator UI + product_name = product_name_from_ui # reset to name from creator UI layer_names_in_hierarchy = [] created_group_name = self._clean_highlights(stub, group.name) if use_layer_name: layer_name = re.sub( - "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS), + "[^{}]+".format(PRODUCT_NAME_ALLOWED_SYMBOLS), "", group.name 
) - if "{layer}" not in subset_name.lower(): - subset_name += "{Layer}" + if "{layer}" not in product_name.lower(): + product_name += "{Layer}" layer_fill = prepare_template_data({"layer": layer_name}) - subset_name = subset_name.format(**layer_fill) - subset_name = clean_subset_name(subset_name) + product_name = product_name.format(**layer_fill) + product_name = clean_product_name(product_name) if group.long_name: for directory in group.long_name[::-1]: @@ -97,7 +97,7 @@ class ImageCreator(Creator): layer_names_in_hierarchy.append(name) data_update = { - "subset": subset_name, + "productName": product_name, "members": [str(group.id)], "layer_name": layer_name, "long_name": "_".join(layer_names_in_hierarchy) @@ -112,8 +112,9 @@ class ImageCreator(Creator): if not self.active_on_create: data["active"] = False - new_instance = CreatedInstance(self.family, subset_name, data, - self) + new_instance = CreatedInstance( + self.product_type, product_name, data, self + ) stub.imprint(new_instance.get("instance_id"), new_instance.data_to_store()) @@ -159,7 +160,7 @@ class ImageCreator(Creator): label="Create separate instance for each selected"), BoolDef("use_layer_name", default=False, - label="Use layer name in subset"), + label="Use layer name in product"), BoolDef( "mark_for_review", label="Create separate review", @@ -189,9 +190,9 @@ class ImageCreator(Creator): def get_detail_description(self): return """Creator for Image instances - Main publishable item in Photoshop will be of `image` family. Result of - this item (instance) is picture that could be loaded and used - in another DCCs (for example as single layer in composition in + Main publishable item in Photoshop will be of `image` product type. + Result of this item (instance) is picture that could be loaded and + used in another DCCs (for example as single layer in composition in AfterEffects, reference in Maya etc). There are couple of options what to publish: @@ -207,13 +208,13 @@ class ImageCreator(Creator): Use 'Create separate instance for each selected' to create separate images per selected layer (group of layers). - 'Use layer name in subset' will explicitly add layer name into subset - name. Position of this name is configurable in - `project_settings/global/tools/creator/subset_name_profiles`. - If layer placeholder ({layer}) is not used in `subset_name_profiles` + 'Use layer name in product' will explicitly add layer name into + product name. Position of this name is configurable in + `project_settings/global/tools/creator/product_name_profiles`. + If layer placeholder ({layer}) is not used in `product_name_profiles` but layer name should be used (set explicitly in UI or implicitly if multiple images should be created), it is added in capitalized form - as a suffix to subset name. + as a suffix to product name. Each image could have its separate review created if necessary via `Create separate review` toggle. 
@@ -243,8 +244,15 @@ class ImageCreator(Creator): return item.replace(stub.PUBLISH_ICON, '').replace(stub.LOADED_ICON, '') - def get_dynamic_data(self, variant, task_name, asset_doc, - project_name, host_name, instance): + def get_dynamic_data( + self, + project_name, + asset_doc, + task_name, + variant, + host_name, + instance + ): if instance is not None: layer_name = instance.get("layer_name") if layer_name: diff --git a/client/ayon_core/hosts/photoshop/plugins/create/create_review.py b/client/ayon_core/hosts/photoshop/plugins/create/create_review.py index 888b294248..229b736801 100644 --- a/client/ayon_core/hosts/photoshop/plugins/create/create_review.py +++ b/client/ayon_core/hosts/photoshop/plugins/create/create_review.py @@ -4,7 +4,7 @@ from ayon_core.hosts.photoshop.lib import PSAutoCreator class ReviewCreator(PSAutoCreator): """Creates review instance which might be disabled from publishing.""" identifier = "review" - family = "review" + product_type = "review" default_variant = "Main" diff --git a/client/ayon_core/hosts/photoshop/plugins/create/create_workfile.py b/client/ayon_core/hosts/photoshop/plugins/create/create_workfile.py index 3485027215..da0c9d1d12 100644 --- a/client/ayon_core/hosts/photoshop/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/photoshop/plugins/create/create_workfile.py @@ -3,7 +3,7 @@ from ayon_core.hosts.photoshop.lib import PSAutoCreator class WorkfileCreator(PSAutoCreator): identifier = "workfile" - family = "workfile" + product_type = "workfile" default_variant = "Main" diff --git a/client/ayon_core/hosts/photoshop/plugins/load/load_image.py b/client/ayon_core/hosts/photoshop/plugins/load/load_image.py index 0fa6bca901..ec6392bade 100644 --- a/client/ayon_core/hosts/photoshop/plugins/load/load_image.py +++ b/client/ayon_core/hosts/photoshop/plugins/load/load_image.py @@ -36,13 +36,13 @@ class ImageLoader(photoshop.PhotoshopLoader): self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): """ Switch asset or change version """ stub = self.get_stub() layer = container.pop("layer") - context = representation.get("context", {}) + repre_doc = context["representation"] namespace_from_container = re.sub(r'_\d{3}$', '', container["namespace"]) @@ -55,14 +55,14 @@ class ImageLoader(photoshop.PhotoshopLoader): else: # switching version - keep same name layer_name = container["namespace"] - path = get_representation_path(representation) + path = get_representation_path(repre_doc) with photoshop.maintained_selection(): stub.replace_smart_object( layer, path, layer_name ) stub.imprint( - layer.id, {"representation": str(representation["_id"])} + layer.id, {"representation": str(repre_doc["_id"])} ) def remove(self, container): @@ -77,8 +77,8 @@ class ImageLoader(photoshop.PhotoshopLoader): stub.imprint(layer.id, {}) stub.delete_layer(layer.id) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def import_layer(self, file_name, layer_name, stub): return stub.import_smart_object(file_name, layer_name) diff --git a/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py b/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py index 06ac70041e..49ca513bd2 100644 --- a/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py +++ b/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py @@ -86,7 +86,7 @@ class 
ImageFromSequenceLoader(photoshop.PhotoshopLoader): ) ] - def update(self, container, representation): + def update(self, container, context): """No update possible, not containerized.""" pass diff --git a/client/ayon_core/hosts/photoshop/plugins/load/load_reference.py b/client/ayon_core/hosts/photoshop/plugins/load/load_reference.py index e2fec039d0..f83272f97d 100644 --- a/client/ayon_core/hosts/photoshop/plugins/load/load_reference.py +++ b/client/ayon_core/hosts/photoshop/plugins/load/load_reference.py @@ -37,32 +37,37 @@ class ReferenceLoader(photoshop.PhotoshopLoader): self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): """ Switch asset or change version """ stub = self.get_stub() layer = container.pop("layer") - context = representation.get("context", {}) + asset_doc = context["asset"] + subset_doc = context["subset"] + repre_doc = context["representation"] + + folder_name = asset_doc["name"] + product_name = subset_doc["name"] namespace_from_container = re.sub(r'_\d{3}$', '', container["namespace"]) - layer_name = "{}_{}".format(context["asset"], context["subset"]) + layer_name = "{}_{}".format(folder_name, product_name) # switching assets if namespace_from_container != layer_name: layer_name = get_unique_layer_name( - stub.get_layers(), context["asset"], context["subset"] + stub.get_layers(), folder_name, product_name ) else: # switching version - keep same name layer_name = container["namespace"] - path = get_representation_path(representation) + path = get_representation_path(repre_doc) with photoshop.maintained_selection(): stub.replace_smart_object( layer, path, layer_name ) stub.imprint( - layer.id, {"representation": str(representation["_id"])} + layer.id, {"representation": str(repre_doc["_id"])} ) def remove(self, container): @@ -76,8 +81,8 @@ class ReferenceLoader(photoshop.PhotoshopLoader): stub.imprint(layer.id, {}) stub.delete_layer(layer.id) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def import_layer(self, file_name, layer_name, stub): return stub.import_smart_object( diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_image.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_image.py index 051a3da0a1..7773b444d2 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_image.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_image.py @@ -2,7 +2,7 @@ import pyblish.api from ayon_core.client import get_asset_name_identifier from ayon_core.hosts.photoshop import api as photoshop -from ayon_core.pipeline.create import get_subset_name +from ayon_core.pipeline.create import get_product_name class CollectAutoImage(pyblish.api.ContextPlugin): @@ -28,7 +28,7 @@ class CollectAutoImage(pyblish.api.ContextPlugin): task_name = context.data["task"] host_name = context.data["hostName"] asset_doc = context.data["assetEntity"] - asset_name = get_asset_name_identifier(asset_doc) + folder_path = get_asset_name_identifier(asset_doc) auto_creator = proj_settings.get( "photoshop", {}).get( @@ -67,7 +67,7 @@ class CollectAutoImage(pyblish.api.ContextPlugin): # active might not be in legacy meta if layer_meta_data.get("active", True) and layer_item.visible: - instance_names.append(layer_meta_data["subset"]) + instance_names.append(layer_meta_data["productName"]) if len(instance_names) == 0: variants = proj_settings.get( @@ -75,25 +75,31 @@ 
class CollectAutoImage(pyblish.api.ContextPlugin): "create", {}).get( "CreateImage", {}).get( "default_variants", ['']) - family = "image" + product_type = "image" variant = context.data.get("variant") or variants[0] - subset_name = get_subset_name( - family, variant, task_name, asset_doc, - project_name, host_name + product_name = get_product_name( + project_name, + asset_doc, + task_name, + host_name, + product_type, + variant, ) - instance = context.create_instance(subset_name) - instance.data["family"] = family - instance.data["asset"] = asset_name - instance.data["subset"] = subset_name + instance = context.create_instance(product_name) + instance.data["folderPath"] = folder_path + instance.data["productType"] = product_type + instance.data["productName"] = product_name instance.data["ids"] = publishable_ids instance.data["publish"] = True instance.data["creator_identifier"] = "auto_image" + instance.data["family"] = product_type + instance.data["families"] = [product_type] if auto_creator["mark_for_review"]: instance.data["creator_attributes"] = {"mark_for_review": True} - instance.data["families"] = ["review"] + instance.data["families"].append("review") self.log.info("auto image instance: {} ".format(instance.data)) diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_review.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_review.py index c8d4ddf111..14f2f23985 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_review.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_review.py @@ -3,13 +3,13 @@ Requires: None Provides: - instance -> family ("review") + instance -> productType ("review") """ import pyblish.api from ayon_core.client import get_asset_name_identifier from ayon_core.hosts.photoshop import api as photoshop -from ayon_core.pipeline.create import get_subset_name +from ayon_core.pipeline.create import get_product_name class CollectAutoReview(pyblish.api.ContextPlugin): @@ -26,10 +26,10 @@ class CollectAutoReview(pyblish.api.ContextPlugin): publish = True def process(self, context): - family = "review" + product_type = "review" has_review = False for instance in context: - if instance.data["family"] == family: + if instance.data["productType"] == product_type: self.log.debug("Review instance found, won't create new") has_review = True @@ -44,7 +44,7 @@ class CollectAutoReview(pyblish.api.ContextPlugin): stub = photoshop.stub() stored_items = stub.get_layers_metadata() for item in stored_items: - if item.get("creator_identifier") == family: + if item.get("creator_identifier") == product_type: if not item.get("active"): self.log.debug("Review instance disabled") return @@ -67,27 +67,28 @@ class CollectAutoReview(pyblish.api.ContextPlugin): host_name = context.data["hostName"] asset_doc = context.data["assetEntity"] - asset_name = get_asset_name_identifier(asset_doc) + folder_path = get_asset_name_identifier(asset_doc) - subset_name = get_subset_name( - family, - variant, - task_name, - asset_doc, + product_name = get_product_name( project_name, - host_name=host_name, + asset_doc, + task_name, + host_name, + product_type, + variant, project_settings=proj_settings ) - instance = context.create_instance(subset_name) + instance = context.create_instance(product_name) instance.data.update({ - "subset": subset_name, - "label": subset_name, - "name": subset_name, - "family": family, - "families": [], + "label": product_name, + "name": product_name, + "productName": product_name, + "productType": 
product_type, + "family": product_type, + "families": [product_type], "representations": [], - "asset": asset_name, + "folderPath": folder_path, "publish": self.publish }) diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_workfile.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_workfile.py index 365fd0a684..0b12195603 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_workfile.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_workfile.py @@ -3,7 +3,7 @@ import pyblish.api from ayon_core.client import get_asset_name_identifier from ayon_core.hosts.photoshop import api as photoshop -from ayon_core.pipeline.create import get_subset_name +from ayon_core.pipeline.create import get_product_name class CollectAutoWorkfile(pyblish.api.ContextPlugin): @@ -16,7 +16,7 @@ class CollectAutoWorkfile(pyblish.api.ContextPlugin): targets = ["automated"] def process(self, context): - family = "workfile" + product_type = "workfile" file_path = context.data["currentFile"] _, ext = os.path.splitext(file_path) staging_dir = os.path.dirname(file_path) @@ -29,7 +29,7 @@ class CollectAutoWorkfile(pyblish.api.ContextPlugin): } for instance in context: - if instance.data["family"] == family: + if instance.data["productType"] == product_type: self.log.debug("Workfile instance found, won't create new") instance.data.update({ "label": base_name, @@ -47,7 +47,7 @@ class CollectAutoWorkfile(pyblish.api.ContextPlugin): stub = photoshop.stub() stored_items = stub.get_layers_metadata() for item in stored_items: - if item.get("creator_identifier") == family: + if item.get("creator_identifier") == product_type: if not item.get("active"): self.log.debug("Workfile instance disabled") return @@ -71,27 +71,28 @@ class CollectAutoWorkfile(pyblish.api.ContextPlugin): host_name = context.data["hostName"] asset_doc = context.data["assetEntity"] - asset_name = get_asset_name_identifier(asset_doc) - subset_name = get_subset_name( - family, - variant, - task_name, - asset_doc, + folder_path = get_asset_name_identifier(asset_doc) + product_name = get_product_name( project_name, - host_name=host_name, + asset_doc, + task_name, + host_name, + product_type, + variant, project_settings=proj_settings ) # Create instance - instance = context.create_instance(subset_name) + instance = context.create_instance(product_name) instance.data.update({ - "subset": subset_name, "label": base_name, "name": base_name, - "family": family, - "families": [], + "productName": product_name, + "productType": product_type, + "family": product_type, + "families": [product_type], "representations": [], - "asset": asset_name + "folderPath": folder_path }) # creating representation diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_batch_data.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_batch_data.py index 5e43a021c3..a32b5f8fa5 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_batch_data.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_batch_data.py @@ -2,7 +2,7 @@ Provides: context -> Loaded batch file. 
- - asset + - folderPath - task (task name) - taskType - project_name @@ -17,12 +17,11 @@ import os import pyblish.api -from ayon_core.pipeline import legacy_io from openpype_modules.webpublisher.lib import ( get_batch_asset_task_info, parse_json ) -from ayon_core.tests.lib import is_in_tests +from ayon_core.lib import is_in_tests class CollectBatchData(pyblish.api.ContextPlugin): @@ -53,10 +52,10 @@ class CollectBatchData(pyblish.api.ContextPlugin): assert os.path.exists(batch_dir), \ "Folder {} doesn't exist".format(batch_dir) - project_name = os.environ.get("AVALON_PROJECT") + project_name = os.environ.get("AYON_PROJECT_NAME") if project_name is None: raise AssertionError( - "Environment `AVALON_PROJECT` was not found." + "Environment `AYON_PROJECT_NAME` was not found." "Could not set project `root` which may cause issues." ) @@ -69,12 +68,10 @@ class CollectBatchData(pyblish.api.ContextPlugin): batch_data["context"] ) - os.environ["AVALON_ASSET"] = asset_name - os.environ["AVALON_TASK"] = task_name - legacy_io.Session["AVALON_ASSET"] = asset_name - legacy_io.Session["AVALON_TASK"] = task_name + os.environ["AYON_FOLDER_PATH"] = asset_name + os.environ["AYON_TASK_NAME"] = task_name - context.data["asset"] = asset_name + context.data["folderPath"] = asset_name context.data["task"] = task_name context.data["taskType"] = task_type context.data["project_name"] = project_name diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_color_coded_instances.py index e309da62ba..f11ba4383a 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_color_coded_instances.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_color_coded_instances.py @@ -3,10 +3,9 @@ import re import pyblish.api -from ayon_core.lib import prepare_template_data +from ayon_core.lib import prepare_template_data, is_in_tests from ayon_core.hosts.photoshop import api as photoshop from ayon_core.settings import get_project_settings -from ayon_core.tests.lib import is_in_tests class CollectColorCodedInstances(pyblish.api.ContextPlugin): @@ -14,35 +13,31 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): Used in remote publishing when artists marks publishable layers by color- coding. Top level layers (group) must be marked by specific color to be - published as an instance of 'image' family. + published as an instance of 'image' product type. Can add group for all publishable layers to allow creation of flattened image. (Cannot contain special background layer as it cannot be grouped!) Based on value `create_flatten_image` from Settings: - - "yes": create flattened 'image' subset of all publishable layers + create - 'image' subset per publishable layer - - "only": create ONLY flattened 'image' subset of all publishable layers - - "no": do not create flattened 'image' subset at all, - only separate subsets per marked layer. + - "yes": create flattened 'image' product of all publishable layers + create + 'image' product per publishable layer + - "only": create ONLY flattened 'image' product of all publishable layers + - "no": do not create flattened 'image' product at all, + only separate products per marked layer. 
Identifier: - id (str): "pyblish.avalon.instance" + id (str): "ayon.create.instance" """ - order = pyblish.api.CollectorOrder + 0.100 - label = "Instances" + label = "Collect Color-coded Instances" order = pyblish.api.CollectorOrder hosts = ["photoshop"] targets = ["automated"] # configurable by Settings color_code_mapping = [] - # TODO check if could be set globally, probably doesn't make sense when - # flattened template cannot - subset_template_name = "" create_flatten_image = "no" - flatten_subset_template = "" + flatten_product_name_template = "" def process(self, context): self.log.info("CollectColorCodedInstances") @@ -50,15 +45,19 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): os.environ.get("AYON_PUBLISH_DATA") or os.environ.get("OPENPYPE_PUBLISH_DATA") ) - if (is_in_tests() and - (not batch_dir or not os.path.exists(batch_dir))): + if ( + is_in_tests() + and ( + not batch_dir or not os.path.exists(batch_dir) + ) + ): self.log.debug("Automatic testing, no batch data, skipping") return - existing_subset_names = self._get_existing_subset_names(context) + existing_product_names = self._get_existing_product_names(context) # from CollectBatchData - asset_name = context.data["asset"] + asset_name = context.data["folderPath"] task_name = context.data["task"] variant = context.data["variant"] project_name = context.data["projectEntity"]["name"] @@ -73,7 +72,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): publishable_layers = [] created_instances = [] - family_from_settings = None + product_type_from_settings = None for layer in layers: self.log.debug("Layer:: {}".format(layer)) if layer.parents: @@ -84,60 +83,73 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): self.log.debug("Not visible, skip") continue - resolved_family, resolved_subset_template = self._resolve_mapping( - layer + resolved_product_type, resolved_product_template = ( + self._resolve_mapping(layer) ) - if not resolved_subset_template or not resolved_family: - self.log.debug("!!! Not found family or template, skip") + if not resolved_product_template or not resolved_product_type: + self.log.debug("!!! Not found product type or template, skip") continue - if not family_from_settings: - family_from_settings = resolved_family + if not product_type_from_settings: + product_type_from_settings = resolved_product_type fill_pairs = { "variant": variant, - "family": resolved_family, + "family": resolved_product_type, + "product": {"type": resolved_product_type}, "task": task_name, "layer": layer.clean_name } - subset = resolved_subset_template.format( + product_name = resolved_product_template.format( **prepare_template_data(fill_pairs)) - subset = self._clean_subset_name(stub, naming_conventions, - subset, layer) + product_name = self._clean_product_name( + stub, naming_conventions, product_name, layer + ) - if subset in existing_subset_names: - self.log.info( - "Subset {} already created, skipping.".format(subset)) + if product_name in existing_product_names: + self.log.info(( + "Product {} already created, skipping." 
+ ).format(product_name)) continue if self.create_flatten_image != "flatten_only": - instance = self._create_instance(context, layer, - resolved_family, - asset_name, subset, task_name) + instance = self._create_instance( + context, + layer, + resolved_product_type, + asset_name, + product_name, + task_name + ) created_instances.append(instance) - existing_subset_names.append(subset) + existing_product_names.append(product_name) publishable_layers.append(layer) if self.create_flatten_image != "no" and publishable_layers: self.log.debug("create_flatten_image") - if not self.flatten_subset_template: + if not self.flatten_product_name_template: self.log.warning("No template for flatten image") return fill_pairs.pop("layer") - subset = self.flatten_subset_template.format( + product_name = self.flatten_product_name_template.format( **prepare_template_data(fill_pairs)) first_layer = publishable_layers[0] # dummy layer - first_layer.name = subset - family = family_from_settings # inherit family - instance = self._create_instance(context, first_layer, - family, - asset_name, subset, task_name) + first_layer.name = product_name + product_type = product_type_from_settings # inherit product type + instance = self._create_instance( + context, + first_layer, + product_type, + asset_name, + product_name, + task_name + ) instance.data["ids"] = [layer.id for layer in publishable_layers] created_instances.append(instance) @@ -147,29 +159,37 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): self.log.info("Found: \"%s\" " % instance.data["name"]) self.log.info("instance: {} ".format(instance.data)) - def _get_existing_subset_names(self, context): + def _get_existing_product_names(self, context): """Collect manually created instances from workfile. Shouldn't be any as Webpublisher bypass publishing via Openpype, but might be some if workfile published through OP is reused. """ - existing_subset_names = [] + existing_product_names = [] for instance in context: - if instance.data.get('publish'): - existing_subset_names.append(instance.data.get('subset')) + if instance.data.get("publish") is not False: + existing_product_names.append(instance.data.get("productName")) - return existing_subset_names + return existing_product_names - def _create_instance(self, context, layer, family, - asset, subset, task_name): + def _create_instance( + self, + context, + layer, + product_type, + folder_path, + product_name, + task_name + ): instance = context.create_instance(layer.name) - instance.data["family"] = family instance.data["publish"] = True - instance.data["asset"] = asset + instance.data["productType"] = product_type + instance.data["productName"] = product_name + instance.data["folderPath"] = folder_path instance.data["task"] = task_name - instance.data["subset"] = subset instance.data["layer"] = layer - instance.data["families"] = [] + instance.data["family"] = product_type + instance.data["families"] = [product_type] return instance @@ -179,50 +199,63 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): If both color code AND name regex is configured, BOTH must be valid If layer matches to multiple mappings, only first is used! 
""" - family_list = [] - family = None - subset_name_list = [] - resolved_subset_template = None + product_type_list = [] + product_name_list = [] for mapping in self.color_code_mapping: - if mapping["color_code"] and \ - layer.color_code not in mapping["color_code"]: + if ( + mapping["color_code"] + and layer.color_code not in mapping["color_code"] + ): continue - if mapping["layer_name_regex"] and \ - not any(re.search(pattern, layer.name) - for pattern in mapping["layer_name_regex"]): + if ( + mapping["layer_name_regex"] + and not any( + re.search(pattern, layer.name) + for pattern in mapping["layer_name_regex"] + ) + ): continue - family_list.append(mapping["family"]) - subset_name_list.append(mapping["subset_template_name"]) - if len(subset_name_list) > 1: - self.log.warning("Multiple mappings found for '{}'". - format(layer.name)) - self.log.warning("Only first subset name template used!") - subset_name_list[:] = subset_name_list[0] + product_type_list.append(mapping["product_type"]) + product_name_list.append(mapping["product_name_template"]) - if len(family_list) > 1: - self.log.warning("Multiple mappings found for '{}'". - format(layer.name)) - self.log.warning("Only first family used!") - family_list[:] = family_list[0] - if subset_name_list: - resolved_subset_template = subset_name_list.pop() - if family_list: - family = family_list.pop() + if len(product_name_list) > 1: + self.log.warning( + "Multiple mappings found for '{}'".format(layer.name) + ) + self.log.warning("Only first product name template used!") + product_name_list[:] = product_name_list[0] - self.log.debug("resolved_family {}".format(family)) - self.log.debug("resolved_subset_template {}".format( - resolved_subset_template)) - return family, resolved_subset_template + if len(product_type_list) > 1: + self.log.warning( + "Multiple mappings found for '{}'".format(layer.name) + ) + self.log.warning("Only first product type used!") + product_type_list[:] = product_type_list[0] - def _clean_subset_name(self, stub, naming_conventions, subset, layer): - """Cleans invalid characters from subset name and layer name.""" - if re.search(naming_conventions["invalid_chars"], subset): - subset = re.sub( + resolved_product_template = None + if product_name_list: + resolved_product_template = product_name_list.pop() + + product_type = None + if product_type_list: + product_type = product_type_list.pop() + + self.log.debug("resolved_product_type {}".format(product_type)) + self.log.debug("resolved_product_template {}".format( + resolved_product_template)) + return product_type, resolved_product_template + + def _clean_product_name( + self, stub, naming_conventions, product_name, layer + ): + """Cleans invalid characters from product name and layer name.""" + if re.search(naming_conventions["invalid_chars"], product_name): + product_name = re.sub( naming_conventions["invalid_chars"], naming_conventions["replace_char"], - subset + product_name ) layer_name = re.sub( naming_conventions["invalid_chars"], @@ -232,4 +265,4 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): layer.name = layer_name stub.rename_layer(layer.id, layer_name) - return subset + return product_name diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_published_version.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_published_version.py index e330b04a1f..53b2503ba2 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_published_version.py +++ 
b/client/ayon_core/hosts/photoshop/plugins/publish/collect_published_version.py @@ -4,7 +4,7 @@ For synchronization of published image and workfile version it is required to store workfile version from workfile file name in context.data["version"]. In remote publishing this name is unreliable (artist might not follow naming convention etc.), last published workfile version for particular workfile -subset is used instead. +product is used instead. This plugin runs only in remote publishing (eg. Webpublisher). @@ -30,13 +30,13 @@ class CollectPublishedVersion(pyblish.api.ContextPlugin): targets = ["automated"] def process(self, context): - workfile_subset_name = None + workfile_product_name = None for instance in context: - if instance.data["family"] == "workfile": - workfile_subset_name = instance.data["subset"] + if instance.data["productType"] == "workfile": + workfile_product_name = instance.data["productName"] break - if not workfile_subset_name: + if not workfile_product_name: self.log.warning("No workfile instance found, " "synchronization of version will not work.") return @@ -45,9 +45,9 @@ class CollectPublishedVersion(pyblish.api.ContextPlugin): asset_doc = context.data["assetEntity"] asset_id = asset_doc["_id"] - version_doc = get_last_version_by_subset_name(project_name, - workfile_subset_name, - asset_id) + version_doc = get_last_version_by_subset_name( + project_name, workfile_product_name, asset_id + ) if version_doc: version_int = int(version_doc["name"]) + 1 diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_review.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_review.py index e487760736..1ffbadf022 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_review.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_review.py @@ -6,24 +6,17 @@ Provides: instance -> family ("review") """ -import os - import pyblish.api -from ayon_core.pipeline.create import get_subset_name - class CollectReview(pyblish.api.ContextPlugin): """Adds review to families for instances marked to be reviewable. 
""" label = "Collect Review" - label = "Review" hosts = ["photoshop"] order = pyblish.api.CollectorOrder + 0.1 - publish = True - def process(self, context): for instance in context: creator_attributes = instance.data["creator_attributes"] diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_workfile.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_workfile.py index 6740a6c82a..b9080a12ff 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_workfile.py @@ -1,8 +1,6 @@ import os import pyblish.api -from ayon_core.pipeline.create import get_subset_name - class CollectWorkfile(pyblish.api.ContextPlugin): """Collect current script for publish.""" @@ -15,7 +13,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): def process(self, context): for instance in context: - if instance.data["family"] == "workfile": + if instance.data["productType"] == "workfile": file_path = context.data["currentFile"] _, ext = os.path.splitext(file_path) staging_dir = os.path.dirname(file_path) diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/extract_image.py b/client/ayon_core/hosts/photoshop/plugins/publish/extract_image.py index 71605b53d9..7290a1437e 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/extract_image.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/extract_image.py @@ -36,7 +36,7 @@ class ExtractImage(pyblish.api.ContextPlugin): with photoshop.maintained_selection(): with photoshop.maintained_visibility(layers=all_layers): for instance in context: - if instance.data["family"] not in self.families: + if instance.data["productType"] not in self.families: continue staging_dir = self.staging_dir(instance) diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/extract_review.py b/client/ayon_core/hosts/photoshop/plugins/publish/extract_review.py index 732a53f194..3497e7ad75 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/extract_review.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/extract_review.py @@ -22,7 +22,8 @@ class ExtractReview(publish.Extractor): 'review' family could be used in other steps as a reference, as it contains flattened image by default. (Eg. artist could load this review as a single item and see full image. In most cases 'image' - family is separated by layers to better usage in animation or comp.) + product type is separated by layers to better usage in animation + or comp.) """ label = "Extract Review" @@ -55,18 +56,21 @@ class ExtractReview(publish.Extractor): "tags": self.jpg_options['tags'], } - if instance.data["family"] != "review": - self.log.debug("Existing extracted file from image family used.") + if instance.data["productType"] != "review": + self.log.debug( + "Existing extracted file from image product type used." 
+ ) # enable creation of review, without this jpg review would clash - # with jpg of the image family + # with jpg of the image product type output_name = repre_name repre_name = "{}_{}".format(repre_name, output_name) repre_skeleton.update({"name": repre_name, "outputName": output_name}) img_file = self.output_seq_filename % 0 - self._prepare_file_for_image_family(img_file, instance, - staging_dir) + self._prepare_file_for_image_product_type( + img_file, instance, staging_dir + ) repre_skeleton.update({ "files": img_file, }) @@ -120,8 +124,10 @@ class ExtractReview(publish.Extractor): self.log.info(f"Extracted {instance} to {staging_dir}") - def _prepare_file_for_image_family(self, img_file, instance, staging_dir): - """Converts existing file for image family to .jpg + def _prepare_file_for_image_product_type( + self, img_file, instance, staging_dir + ): + """Converts existing file for image product type to .jpg Image instance could have its own separate review (instance per layer for example). This uses extracted file instead of extracting again. @@ -261,12 +267,15 @@ class ExtractReview(publish.Extractor): """ layers = [] # creating review for existing 'image' instance - if instance.data["family"] == "image" and instance.data.get("layer"): + if ( + instance.data["productType"] == "image" + and instance.data.get("layer") + ): layers.append(instance.data["layer"]) return layers for image_instance in instance.context: - if image_instance.data["family"] != "image": + if image_instance.data["productType"] != "image": continue if not image_instance.data.get("layer"): # dummy instance for flatten image diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/help/validate_naming.xml b/client/ayon_core/hosts/photoshop/plugins/publish/help/validate_naming.xml index 023bbf26fa..28c2c2c773 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/help/validate_naming.xml +++ b/client/ayon_core/hosts/photoshop/plugins/publish/help/validate_naming.xml @@ -3,9 +3,9 @@ Subset name -## Invalid subset or layer name +## Invalid product or layer name -Subset or layer name cannot contain specific characters (spaces etc) which could cause issue when subset name is used in a published file name. +Subset or layer name cannot contain specific characters (spaces etc) which could cause issue when product name is used in a published file name. {msg} ### How to repair? 
diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/validate_instance_asset.py b/client/ayon_core/hosts/photoshop/plugins/publish/validate_instance_asset.py index dc0f2efd52..67a7303316 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/validate_instance_asset.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/validate_instance_asset.py @@ -31,7 +31,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action): current_asset_name = get_current_asset_name() for instance in instances: data = stub.read(instance[0]) - data["asset"] = current_asset_name + data["folderPath"] = current_asset_name stub.imprint(instance[0], data) @@ -54,7 +54,7 @@ class ValidateInstanceAsset(OptionalPyblishPluginMixin, order = ValidateContentsOrder def process(self, instance): - instance_asset = instance.data["asset"] + instance_asset = instance.data["folderPath"] current_asset = get_current_asset_name() if instance_asset != current_asset: diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/validate_naming.py b/client/ayon_core/hosts/photoshop/plugins/publish/validate_naming.py index 89018a1cff..ce940c47ce 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/validate_naming.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/validate_naming.py @@ -3,7 +3,7 @@ import re import pyblish.api from ayon_core.hosts.photoshop import api as photoshop -from ayon_core.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS +from ayon_core.pipeline.create import PRODUCT_NAME_ALLOWED_SYMBOLS from ayon_core.pipeline.publish import ( ValidateContentsOrder, PublishXmlValidationError, @@ -51,17 +51,17 @@ class ValidateNamingRepair(pyblish.api.Action): stub.rename_layer(current_layer_state.id, layer_name) - subset_name = re.sub(invalid_chars, replace_char, - instance.data["subset"]) + product_name = re.sub(invalid_chars, replace_char, + instance.data["productName"]) # format from Tool Creator - subset_name = re.sub( - "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS), + product_name = re.sub( + "[^{}]+".format(PRODUCT_NAME_ALLOWED_SYMBOLS), "", - subset_name + product_name ) - layer_meta["subset"] = subset_name + layer_meta["productName"] = product_name stub.imprint(instance_id, layer_meta) return True @@ -88,21 +88,24 @@ class ValidateNaming(pyblish.api.InstancePlugin): layer = instance.data.get("layer") if layer: - msg = "Name \"{}\" is not allowed.{}".format(layer.clean_name, - help_msg) - + msg = "Name \"{}\" is not allowed.{}".format( + layer.clean_name, help_msg + ) formatting_data = {"msg": msg} if re.search(self.invalid_chars, layer.clean_name): - raise PublishXmlValidationError(self, msg, - formatting_data=formatting_data - ) + raise PublishXmlValidationError( + self, msg, formatting_data=formatting_data + ) - msg = "Subset \"{}\" is not allowed.{}".format(instance.data["subset"], - help_msg) + product_name = instance.data["productName"] + msg = "Product \"{}\" is not allowed.{}".format( + product_name, help_msg + ) formatting_data = {"msg": msg} - if re.search(self.invalid_chars, instance.data["subset"]): - raise PublishXmlValidationError(self, msg, - formatting_data=formatting_data) + if re.search(self.invalid_chars, product_name): + raise PublishXmlValidationError( + self, msg, formatting_data=formatting_data + ) @classmethod def get_replace_chars(cls): diff --git a/client/ayon_core/hosts/resolve/addon.py b/client/ayon_core/hosts/resolve/addon.py index 9c9932826b..1354caabb2 100644 --- a/client/ayon_core/hosts/resolve/addon.py +++ b/client/ayon_core/hosts/resolve/addon.py @@ -1,17 
+1,14 @@ import os -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon from .utils import RESOLVE_ROOT_DIR -class ResolveAddon(OpenPypeModule, IHostAddon): +class ResolveAddon(AYONAddon, IHostAddon): name = "resolve" host_name = "resolve" - def initialize(self, module_settings): - self.enabled = True - def get_launch_hook_paths(self, app): if app.host_name != self.host_name: return [] diff --git a/client/ayon_core/hosts/resolve/api/lib.py b/client/ayon_core/hosts/resolve/api/lib.py index 2c648bb4cc..6e4e17811f 100644 --- a/client/ayon_core/hosts/resolve/api/lib.py +++ b/client/ayon_core/hosts/resolve/api/lib.py @@ -519,9 +519,9 @@ def imprint(timeline_item, data=None): Examples: data = { - 'asset': 'sq020sh0280', - 'family': 'render', - 'subset': 'subsetMain' + 'folderPath': 'sq020sh0280', + 'productType': 'render', + 'productName': 'productMain' } """ data = data or {} diff --git a/client/ayon_core/hosts/resolve/api/pipeline.py b/client/ayon_core/hosts/resolve/api/pipeline.py index 2c5e0daf4b..19d33971dc 100644 --- a/client/ayon_core/hosts/resolve/api/pipeline.py +++ b/client/ayon_core/hosts/resolve/api/pipeline.py @@ -296,8 +296,8 @@ def list_instances(): if tag_data: asset = tag_data.get("asset") - subset = tag_data.get("subset") - tag_data["label"] = f"{ti_name} [{asset}-{subset}]" + product_name = tag_data.get("productName") + tag_data["label"] = f"{ti_name} [{asset}-{product_name}]" listed_instances.append(tag_data) return listed_instances diff --git a/client/ayon_core/hosts/resolve/api/plugin.py b/client/ayon_core/hosts/resolve/api/plugin.py index ccb20f712f..157b8de363 100644 --- a/client/ayon_core/hosts/resolve/api/plugin.py +++ b/client/ayon_core/hosts/resolve/api/plugin.py @@ -350,7 +350,7 @@ class ClipLoader: """ Gets context and convert it to self.data data structure: { - "name": "assetName_subsetName_representationName" + "name": "assetName_productName_representationName" "binPath": "projectBinPath", } """ @@ -358,17 +358,17 @@ class ClipLoader: representation = self.context["representation"] representation_context = representation["context"] asset = str(representation_context["asset"]) - subset = str(representation_context["subset"]) + product_name = str(representation_context["subset"]) representation_name = str(representation_context["representation"]) self.data["clip_name"] = "_".join([ asset, - subset, + product_name, representation_name ]) self.data["versionData"] = self.context["version"]["data"] self.data["timeline_basename"] = "timeline_{}_{}".format( - subset, representation_name) + product_name, representation_name) # solve project bin structure path hierarchy = str("/".join(( @@ -538,7 +538,7 @@ class TimelineItemLoader(LoaderPlugin): ): pass - def update(self, container, representation): + def update(self, container, context): """Update an existing `container` """ pass @@ -603,9 +603,9 @@ class PublishClip: rename_default = False hierarchy_default = "{_folder_}/{_sequence_}/{_track_}" clip_name_default = "shot_{_trackIndex_:0>3}_{_clipIndex_:0>4}" - subset_name_default = "" + base_product_name_default = "" review_track_default = "< none >" - subset_family_default = "plate" + product_type_default = "plate" count_from_default = 10 count_steps_default = 10 vertical_sync_default = False @@ -630,7 +630,6 @@ class PublishClip: self.track_name = str(track_name).replace(" ", "_") self.track_index = int(timeline_item_data["track"]["index"]) - # adding tag.family into tag if kwargs.get("avalon"): 
self.tag_data.update(kwargs["avalon"]) @@ -728,10 +727,10 @@ class PublishClip: "countFrom", {}).get("value") or self.count_from_default self.count_steps = self.ui_inputs.get( "countSteps", {}).get("value") or self.count_steps_default - self.subset_name = self.ui_inputs.get( - "subsetName", {}).get("value") or self.subset_name_default - self.subset_family = self.ui_inputs.get( - "subsetFamily", {}).get("value") or self.subset_family_default + self.base_product_name = self.ui_inputs.get( + "productName", {}).get("value") or self.base_product_name_default + self.product_type = self.ui_inputs.get( + "productType", {}).get("value") or self.product_type_default self.vertical_sync = self.ui_inputs.get( "vSyncOn", {}).get("value") or self.vertical_sync_default self.driving_layer = self.ui_inputs.get( @@ -739,12 +738,14 @@ class PublishClip: self.review_track = self.ui_inputs.get( "reviewTrack", {}).get("value") or self.review_track_default - # build subset name from layer name - if self.subset_name == "": - self.subset_name = self.track_name + # build product name from layer name + if self.base_product_name == "": + self.base_product_name = self.track_name - # create subset for publishing - self.subset = self.subset_family + self.subset_name.capitalize() + # create product name for publishing + self.product_name = ( + self.product_type + self.base_product_name.capitalize() + ) def _replace_hash_to_expression(self, name, text): """ Replace hash with number in correct padding. """ @@ -824,17 +825,19 @@ class PublishClip: # driving layer is set as negative match for (_in, _out), hero_data in self.vertical_clip_match.items(): hero_data.update({"heroTrack": False}) - if _in == self.clip_in and _out == self.clip_out: - data_subset = hero_data["subset"] - # add track index in case duplicity of names in hero data - if self.subset in data_subset: - hero_data["subset"] = self.subset + str( - self.track_index) - # in case track name and subset name is the same then add - if self.subset_name == self.track_name: - hero_data["subset"] = self.subset - # assign data to return hierarchy data to tag - tag_hierarchy_data = hero_data + if _in != self.clip_in or _out != self.clip_out: + continue + + data_product_name = hero_data["productName"] + # add track index in case duplicity of names in hero data + if self.product_name in data_product_name: + hero_data["productName"] = self.product_name + str( + self.track_index) + # in case track name and product name is the same then add + if self.base_product_name == self.track_name: + hero_data["productName"] = self.product_name + # assign data to return hierarchy data to tag + tag_hierarchy_data = hero_data # add data to return data dict self.tag_data.update(tag_hierarchy_data) @@ -859,8 +862,8 @@ class PublishClip: "hierarchy": hierarchy_filled, "parents": self.parents, "hierarchyData": hierarchy_formatting_data, - "subset": self.subset, - "family": self.subset_family + "productName": self.product_name, + "productType": self.product_type } def _convert_to_entity(self, key): diff --git a/client/ayon_core/hosts/resolve/api/workio.py b/client/ayon_core/hosts/resolve/api/workio.py index 5e4865ddc5..b6c2f63432 100644 --- a/client/ayon_core/hosts/resolve/api/workio.py +++ b/client/ayon_core/hosts/resolve/api/workio.py @@ -79,7 +79,7 @@ def open_file(filepath): def current_file(): pm = get_project_manager() file_ext = file_extensions()[0] - workdir_path = os.getenv("AVALON_WORKDIR") + workdir_path = os.getenv("AYON_WORKDIR") project = pm.GetCurrentProject() project_name = 
project.GetName() file_name = project_name + file_ext @@ -93,4 +93,4 @@ def current_file(): def work_root(session): - return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/") + return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/") diff --git a/client/ayon_core/hosts/resolve/plugins/create/create_shot_clip.py b/client/ayon_core/hosts/resolve/plugins/create/create_shot_clip.py index 65bc9fed9d..3a2a0345ea 100644 --- a/client/ayon_core/hosts/resolve/plugins/create/create_shot_clip.py +++ b/client/ayon_core/hosts/resolve/plugins/create/create_shot_clip.py @@ -10,12 +10,12 @@ class CreateShotClip(plugin.Creator): """Publishable clip""" label = "Create Publishable Clip" - family = "clip" + product_type = "clip" icon = "film" defaults = ["Main"] gui_tracks = get_video_track_names() - gui_name = "OpenPype publish attributes creator" + gui_name = "AYON publish attributes creator" gui_info = "Define sequential rename and fill hierarchy data." gui_inputs = { "renameHierarchy": { @@ -133,19 +133,19 @@ class CreateShotClip(plugin.Creator): "target": "ui", "order": 3, "value": { - "subsetName": { + "productName": { "value": ["", "main", "bg", "fg", "bg", "animatic"], "type": "QComboBox", - "label": "Subset Name", + "label": "Product Name", "target": "ui", - "toolTip": "chose subset name pattern, if is selected, name of track layer will be used", # noqa + "toolTip": "chose product name pattern, if is selected, name of track layer will be used", # noqa "order": 0}, - "subsetFamily": { + "productType": { "value": ["plate", "take"], "type": "QComboBox", - "label": "Subset Family", - "target": "ui", "toolTip": "What use of this subset is for", # noqa + "label": "Product type", + "target": "ui", "toolTip": "What use of this product is for", # noqa "order": 1}, "reviewTrack": { "value": ["< none >"] + gui_tracks, @@ -159,7 +159,7 @@ class CreateShotClip(plugin.Creator): "type": "QCheckBox", "label": "Include audio", "target": "tag", - "toolTip": "Process subsets with corresponding audio", # noqa + "toolTip": "Process products with corresponding audio", # noqa "order": 3}, "sourceResolution": { "value": False, diff --git a/client/ayon_core/hosts/resolve/plugins/load/load_clip.py b/client/ayon_core/hosts/resolve/plugins/load/load_clip.py index 47aeac213b..04b2aaaf15 100644 --- a/client/ayon_core/hosts/resolve/plugins/load/load_clip.py +++ b/client/ayon_core/hosts/resolve/plugins/load/load_clip.py @@ -15,7 +15,7 @@ from ayon_core.lib.transcoding import ( class LoadClip(plugin.TimelineItemLoader): - """Load a subset to timeline as clip + """Load a product to timeline as clip Place clip to timeline on its asset origin timings collected during conforming to project @@ -59,21 +59,21 @@ class LoadClip(plugin.TimelineItemLoader): self.__class__.__name__, data_imprint) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """ Updating previously loaded clips """ - context = get_representation_context(representation) + repre_doc = context["representation"] name = container['name'] namespace = container['namespace'] timeline_item = container["_timeline_item"] media_pool_item = timeline_item.GetMediaPoolItem() - files = plugin.get_representation_files(representation) + files = plugin.get_representation_files(repre_doc) loader = plugin.ClipLoader(self, context) timeline_item = loader.update(timeline_item, 
files) @@ -92,10 +92,10 @@ class LoadClip(plugin.TimelineItemLoader): def get_tag_data(self, context, name, namespace): """Return data to be imprinted on the timeline item marker""" - representation = context["representation"] - version = context['version'] - version_data = version.get("data", {}) - version_name = version.get("name", None) + repre_doc = context["representation"] + version_doc = context["version"] + version_data = version_doc.get("data", {}) + version_name = version_doc.get("name", None) colorspace = version_data.get("colorspace", None) object_name = "{}_{}".format(name, namespace) @@ -111,7 +111,7 @@ class LoadClip(plugin.TimelineItemLoader): # add variables related to version context data.update({ - "representation": str(representation["_id"]), + "representation": str(repre_doc["_id"]), "version": version_name, "colorspace": colorspace, "objectName": object_name diff --git a/client/ayon_core/hosts/resolve/plugins/publish/precollect_instances.py b/client/ayon_core/hosts/resolve/plugins/publish/precollect_instances.py index 0ae6206496..b1374859e3 100644 --- a/client/ayon_core/hosts/resolve/plugins/publish/precollect_instances.py +++ b/client/ayon_core/hosts/resolve/plugins/publish/precollect_instances.py @@ -2,6 +2,7 @@ from pprint import pformat import pyblish +from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID from ayon_core.hosts.resolve.api.lib import ( get_current_timeline_items, get_timeline_item_pype_tag, @@ -39,7 +40,9 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if not tag_data: continue - if tag_data.get("id") != "pyblish.avalon.instance": + if tag_data.get("id") not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: continue media_pool_item = timeline_item.GetMediaPoolItem() @@ -61,12 +64,33 @@ class PrecollectInstances(pyblish.api.ContextPlugin): }) asset = tag_data["folder_path"] - subset = tag_data["subset"] + + # TODO: remove backward compatibility + product_name = tag_data.get("productName") + if product_name is None: + # backward compatibility: subset -> productName + product_name = tag_data.get("subset") + + # backward compatibility: product_name should not be missing + if not product_name: + self.log.error( + "Product name is not defined for: {}".format(asset)) + + # TODO: remove backward compatibility + product_type = tag_data.get("productType") + if product_type is None: + # backward compatibility: family -> productType + product_type = tag_data.get("family") + + # backward compatibility: product_type should not be missing + if not product_type: + self.log.error( + "Product type is not defined for: {}".format(asset)) data.update({ - "name": "{}_{}".format(asset, subset), - "label": "{} {}".format(asset, subset), - "asset": asset, + "name": "{}_{}".format(asset, product_name), + "label": "{} {}".format(asset, product_name), + "folderPath": asset, "item": timeline_item, "publish": get_publish_attribute(timeline_item), "fps": context.data["fps"], @@ -74,6 +98,9 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "handleEnd": handle_end, "newAssetPublishing": True, "families": ["clip"], + "productType": product_type, + "productName": product_name, + "family": product_type }) # otio clip data @@ -124,19 +151,20 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if not hierarchy_data: return - asset = data["asset"] - subset = "shotMain" + asset = data["folderPath"] + product_name = "shotMain" # insert family into families - family = "shot" + product_type = "shot" data.update({ - "name": "{}_{}".format(asset, subset), - 
"label": "{} {}".format(asset, subset), - "subset": subset, - "asset": asset, - "family": family, - "families": [], + "name": "{}_{}".format(asset, product_name), + "label": "{} {}".format(asset, product_name), + "folderPath": asset, + "productName": product_name, + "productType": product_type, + "family": product_type, + "families": [product_type], "publish": get_publish_attribute(timeline_item) }) diff --git a/client/ayon_core/hosts/resolve/plugins/publish/precollect_workfile.py b/client/ayon_core/hosts/resolve/plugins/publish/precollect_workfile.py index 5f8cf6b5d9..a147c9a905 100644 --- a/client/ayon_core/hosts/resolve/plugins/publish/precollect_workfile.py +++ b/client/ayon_core/hosts/resolve/plugins/publish/precollect_workfile.py @@ -17,7 +17,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): current_asset_name = get_current_asset_name() asset_name = current_asset_name.split("/")[-1] - subset = "workfileMain" + product_name = "workfileMain" project = rapi.get_current_project() fps = project.GetSetting("timelineFrameRate") video_tracks = rapi.get_video_track_names() @@ -26,11 +26,12 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): otio_timeline = davinci_export.create_otio_timeline(project) instance_data = { - "name": "{}_{}".format(asset_name, subset), - "label": "{} {}".format(current_asset_name, subset), - "asset": current_asset_name, - "subset": subset, + "name": "{}_{}".format(asset_name, product_name), + "label": "{} {}".format(current_asset_name, product_name), "item": project, + "folderPath": current_asset_name, + "productName": product_name, + "productType": "workfile", "family": "workfile", "families": [] } diff --git a/client/ayon_core/hosts/resolve/startup.py b/client/ayon_core/hosts/resolve/startup.py index 174a2878c5..b3c1a024d9 100644 --- a/client/ayon_core/hosts/resolve/startup.py +++ b/client/ayon_core/hosts/resolve/startup.py @@ -33,7 +33,7 @@ def ensure_installed_host(): def launch_menu(): - print("Launching Resolve OpenPype menu..") + print("Launching Resolve AYON menu..") ensure_installed_host() ayon_core.hosts.resolve.api.launch_pype_menu() @@ -54,7 +54,7 @@ def main(): else: log.info("No last workfile set to open. 
Skipping..") - # Launch OpenPype menu + # Launch AYON menu from ayon_core.settings import get_project_settings from ayon_core.pipeline.context_tools import get_current_project_name project_name = get_current_project_name() @@ -62,7 +62,7 @@ def main(): settings = get_project_settings(project_name) if settings.get("resolve", {}).get("launch_openpype_menu_on_start", True): - log.info("Launching OpenPype menu..") + log.info("Launching AYON menu..") launch_menu() diff --git a/client/ayon_core/hosts/substancepainter/addon.py b/client/ayon_core/hosts/substancepainter/addon.py index a7f21c2288..26829d3153 100644 --- a/client/ayon_core/hosts/substancepainter/addon.py +++ b/client/ayon_core/hosts/substancepainter/addon.py @@ -1,16 +1,13 @@ import os -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon SUBSTANCE_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) -class SubstanceAddon(OpenPypeModule, IHostAddon): +class SubstanceAddon(AYONAddon, IHostAddon): name = "substancepainter" host_name = "substancepainter" - def initialize(self, module_settings): - self.enabled = True - def add_implementation_envs(self, env, _app): # Add requirements to SUBSTANCE_PAINTER_PLUGINS_PATH plugin_path = os.path.join(SUBSTANCE_HOST_DIR, "deploy") diff --git a/client/ayon_core/hosts/substancepainter/api/pipeline.py b/client/ayon_core/hosts/substancepainter/api/pipeline.py index 2bbcf2aded..843c120d8e 100644 --- a/client/ayon_core/hosts/substancepainter/api/pipeline.py +++ b/client/ayon_core/hosts/substancepainter/api/pipeline.py @@ -14,7 +14,7 @@ import pyblish.api from ayon_core.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost from ayon_core.settings import ( get_current_project_settings, - get_system_settings + get_project_settings, ) from ayon_core.pipeline.template_data import get_template_data_with_names @@ -240,33 +240,34 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): def _install_shelves(self, project_settings): - shelves = project_settings["substancepainter"].get("shelves", {}) + shelves = project_settings["substancepainter"].get("shelves", []) if not shelves: return # Prepare formatting data if we detect any path which might have # template tokens like {asset} in there. 
formatting_data = {} - has_formatting_entries = any("{" in path for path in shelves.values()) + has_formatting_entries = any("{" in item["value"] for item in shelves) if has_formatting_entries: project_name = self.get_current_project_name() asset_name = self.get_current_asset_name() task_name = self.get_current_asset_name() - system_settings = get_system_settings() - formatting_data = get_template_data_with_names(project_name, - asset_name, - task_name, - system_settings) + project_settings = get_project_settings(project_name) + formatting_data = get_template_data_with_names( + project_name, asset_name, task_name, project_settings + ) anatomy = Anatomy(project_name) formatting_data["root"] = anatomy.roots - for name, path in shelves.items(): - shelf_name = None + for shelve_item in shelves: # Allow formatting with anatomy for the paths + path = shelve_item["value"] if "{" in path: path = StringTemplate.format_template(path, formatting_data) + name = shelve_item["name"] + shelf_name = None try: shelf_name = lib.load_shelf(path, name=name) except ValueError as exc: diff --git a/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py b/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py index 831ab6bb23..f204ff7728 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py +++ b/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py @@ -24,12 +24,12 @@ class CreateTextures(Creator): """Create a texture set.""" identifier = "io.openpype.creators.substancepainter.textureset" label = "Textures" - family = "textureSet" + product_type = "textureSet" icon = "picture-o" default_variant = "Main" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): if not substance_painter.project.is_open(): raise CreatorError("Can't create a Texture Set instance without " @@ -47,7 +47,7 @@ class CreateTextures(Creator): if key in pre_create_data: creator_attributes[key] = pre_create_data[key] - instance = self.create_instance_in_context(subset_name, + instance = self.create_instance_in_context(product_name, instance_data) set_instance( instance_id=instance["instance_id"], @@ -57,7 +57,7 @@ class CreateTextures(Creator): def collect_instances(self): for instance in get_instances(): if (instance.get("creator_identifier") == self.identifier or - instance.get("family") == self.family): + instance.get("productType") == self.product_type): self.create_instance_in_context_from_existing(instance) def update_instances(self, update_list): @@ -75,9 +75,9 @@ class CreateTextures(Creator): self._remove_instance_from_context(instance) # Helper methods (this might get moved into Creator class) - def create_instance_in_context(self, subset_name, data): + def create_instance_in_context(self, product_name, data): instance = CreatedInstance( - self.family, subset_name, data, self + self.product_type, product_name, data, self ) self.create_context.creator_adds_instance(instance) return instance diff --git a/client/ayon_core/hosts/substancepainter/plugins/create/create_workfile.py b/client/ayon_core/hosts/substancepainter/plugins/create/create_workfile.py index a51b7d859b..23811dfd29 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/substancepainter/plugins/create/create_workfile.py @@ -17,7 +17,7 @@ class CreateWorkfile(AutoCreator): """Workfile auto-creator.""" identifier = 
"io.openpype.creators.substancepainter.workfile" label = "Workfile" - family = "workfile" + product_type = "workfile" icon = "document" default_variant = "Main" @@ -49,15 +49,19 @@ class CreateWorkfile(AutoCreator): if current_instance is None: self.log.info("Auto-creating workfile instance...") asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + variant, + host_name, ) data = { "folderPath": asset_name, "task": task_name, "variant": variant } - current_instance = self.create_instance_in_context(subset_name, + current_instance = self.create_instance_in_context(product_name, data) elif ( current_instance_asset != asset_name @@ -65,12 +69,16 @@ class CreateWorkfile(AutoCreator): ): # Update instance context if is not the same asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + product_name = self.get_product_name( + project_name, + asset_doc, + task_name, + variant, + host_name, ) current_instance["folderPath"] = asset_name current_instance["task"] = task_name - current_instance["subset"] = subset_name + current_instance["productName"] = product_name set_instance( instance_id=current_instance.get("instance_id"), @@ -80,7 +88,7 @@ class CreateWorkfile(AutoCreator): def collect_instances(self): for instance in get_instances(): if (instance.get("creator_identifier") == self.identifier or - instance.get("family") == self.family): + instance.get("productType") == self.product_type): self.create_instance_in_context_from_existing(instance) def update_instances(self, update_list): @@ -93,9 +101,9 @@ class CreateWorkfile(AutoCreator): set_instances(instance_data_by_id, update=True) # Helper methods (this might get moved into Creator class) - def create_instance_in_context(self, subset_name, data): + def create_instance_in_context(self, product_name, data): instance = CreatedInstance( - self.family, subset_name, data, self + self.product_type, product_name, data, self ) self.create_context.creator_adds_instance(instance) return instance diff --git a/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py b/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py index 48aa99d357..810fecb8e5 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py +++ b/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py @@ -97,12 +97,13 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): set_container_metadata(project_mesh_object_name, container) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): + repre_doc = context["representation"] - path = get_representation_path(representation) + path = get_representation_path(repre_doc) # Reload the mesh container_options = container.get("options", {}) @@ -121,7 +122,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): # Update container representation object_name = container["objectName"] - update_data = {"representation": str(representation["_id"])} + update_data = {"representation": str(repre_doc["_id"])} set_container_metadata(object_name, update_data, update=True) def remove(self, container): diff --git 
a/client/ayon_core/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/client/ayon_core/hosts/substancepainter/plugins/publish/collect_textureset_images.py index b8279c99cd..9cd77e8f90 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/client/ayon_core/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -9,7 +9,7 @@ from ayon_core.hosts.substancepainter.api.lib import ( get_parsed_export_maps, strip_template ) -from ayon_core.pipeline.create import get_subset_name +from ayon_core.pipeline.create import get_product_name from ayon_core.client import get_asset_by_name @@ -27,15 +27,15 @@ class CollectTextureSet(pyblish.api.InstancePlugin): config = self.get_export_config(instance) asset_doc = get_asset_by_name( - project_name=instance.context.data["projectName"], - asset_name=instance.data["asset"] + instance.context.data["projectName"], + instance.data["folderPath"] ) instance.data["exportConfig"] = config maps = get_parsed_export_maps(config) # Let's break the instance into multiple instances to integrate - # a subset per generated texture or texture UDIM sequence + # a product per generated texture or texture UDIM sequence for (texture_set_name, stack_name), template_maps in maps.items(): self.log.info(f"Processing {texture_set_name}/{stack_name}") for template, outputs in template_maps.items(): @@ -49,7 +49,7 @@ class CollectTextureSet(pyblish.api.InstancePlugin): asset_doc, texture_set_name, stack_name): """Create a new instance per image or UDIM sequence. - The new instances will be of family `image`. + The new instances will be of product type `image`. """ @@ -66,7 +66,7 @@ class CollectTextureSet(pyblish.api.InstancePlugin): ) # Define the suffix we want to give this particular texture - # set and set up a remapped subset naming for it. + # set and set up a remapped product naming for it. 
suffix = "" if always_include_texture_set_name or len(all_texture_sets) > 1: # More than one texture set, include texture set name @@ -79,15 +79,16 @@ class CollectTextureSet(pyblish.api.InstancePlugin): map_identifier = strip_template(template) suffix += f".{map_identifier}" - image_subset = get_subset_name( - # TODO: The family actually isn't 'texture' currently but for now - # this is only done so the subset name starts with 'texture' - family="texture", - variant=instance.data["variant"] + suffix, - task_name=instance.data.get("task"), - asset_doc=asset_doc, + image_product_name = get_product_name( + # TODO: The product type actually isn't 'texture' currently but + # for now this is only done so the product name starts with + # 'texture' project_name=context.data["projectName"], + asset_doc=asset_doc, + task_name=instance.data.get("task"), host_name=context.data["hostName"], + product_type="texture", + variant=instance.data["variant"] + suffix, project_settings=context.data["project_settings"] ) @@ -112,18 +113,20 @@ class CollectTextureSet(pyblish.api.InstancePlugin): representation["stagingDir"] = staging_dir # Clone the instance - image_instance = context.create_instance(image_subset) + product_type = "image" + image_instance = context.create_instance(image_product_name) image_instance[:] = instance[:] image_instance.data.update(copy.deepcopy(dict(instance.data))) - image_instance.data["name"] = image_subset - image_instance.data["label"] = image_subset - image_instance.data["subset"] = image_subset - image_instance.data["family"] = "image" - image_instance.data["families"] = ["image", "textures"] + image_instance.data["name"] = image_product_name + image_instance.data["label"] = image_product_name + image_instance.data["productName"] = image_product_name + image_instance.data["productType"] = product_type + image_instance.data["family"] = product_type + image_instance.data["families"] = [product_type, "textures"] image_instance.data["representations"] = [representation] # Group the textures together in the loader - image_instance.data["subsetGroup"] = instance.data["subset"] + image_instance.data["subsetGroup"] = image_product_name # Store the texture set name and stack name on the instance image_instance.data["textureSetName"] = texture_set_name @@ -133,7 +136,7 @@ class CollectTextureSet(pyblish.api.InstancePlugin): # Note: The extractor will assign it to the representation colorspace = outputs[0].get("colorSpace") if colorspace: - self.log.debug(f"{image_subset} colorspace: {colorspace}") + self.log.debug(f"{image_product_name} colorspace: {colorspace}") image_instance.data["colorspace"] = colorspace # Store the instance in the original instance as a member diff --git a/client/ayon_core/hosts/traypublisher/addon.py b/client/ayon_core/hosts/traypublisher/addon.py index d8fc5ed105..70bdfe9a64 100644 --- a/client/ayon_core/hosts/traypublisher/addon.py +++ b/client/ayon_core/hosts/traypublisher/addon.py @@ -2,9 +2,9 @@ import os from ayon_core.lib import get_ayon_launcher_args from ayon_core.lib.execute import run_detached_process -from ayon_core.modules import ( +from ayon_core.addon import ( click_wrap, - OpenPypeModule, + AYONAddon, ITrayAction, IHostAddon, ) @@ -12,13 +12,12 @@ from ayon_core.modules import ( TRAYPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class TrayPublishAddon(OpenPypeModule, IHostAddon, ITrayAction): +class TrayPublishAddon(AYONAddon, IHostAddon, ITrayAction): label = "Publisher" name = "traypublisher" host_name = "traypublisher" - def 
initialize(self, modules_settings): - self.enabled = True + def initialize(self, settings): self.publish_paths = [ os.path.join(TRAYPUBLISH_ROOT_DIR, "plugins", "publish") ] @@ -36,7 +35,7 @@ class TrayPublishAddon(OpenPypeModule, IHostAddon, ITrayAction): def run_traypublisher(self): args = get_ayon_launcher_args( - "module", self.name, "launch" + "addon", self.name, "launch" ) run_detached_process(args) diff --git a/client/ayon_core/hosts/traypublisher/api/editorial.py b/client/ayon_core/hosts/traypublisher/api/editorial.py index d84a7200c8..6153bc5752 100644 --- a/client/ayon_core/hosts/traypublisher/api/editorial.py +++ b/client/ayon_core/hosts/traypublisher/api/editorial.py @@ -16,25 +16,31 @@ class ShotMetadataSolver: NO_DECOR_PATERN = re.compile(r"\{([a-z]*?)\}") - # presets - clip_name_tokenizer = None - shot_rename = True - shot_hierarchy = None - shot_add_tasks = None + def __init__(self, logger): + self.clip_name_tokenizer = [] + self.shot_rename = { + "enabled": False, + "shot_rename_template": "", + } + self.shot_hierarchy = { + "enabled": False, + "parents": [], + "parents_path": "", + } + self.shot_add_tasks = [] + self.log = logger - def __init__( + def update_data( self, clip_name_tokenizer, shot_rename, shot_hierarchy, - shot_add_tasks, - logger + shot_add_tasks ): self.clip_name_tokenizer = clip_name_tokenizer self.shot_rename = shot_rename self.shot_hierarchy = shot_hierarchy self.shot_add_tasks = shot_add_tasks - self.log = logger def _rename_template(self, data): """Shot renaming function @@ -86,7 +92,9 @@ class ShotMetadataSolver: search_text = parent_name + clip_name - for token_key, pattern in self.clip_name_tokenizer.items(): + for clip_name_item in self.clip_name_tokenizer: + token_key = clip_name_item["name"] + pattern = clip_name_item["regex"] p = re.compile(pattern) match = p.findall(search_text) if not match: @@ -137,11 +145,11 @@ class ShotMetadataSolver: )) _parent_tokens_type = { - parent_token["name"]: parent_token["type"] + parent_token["name"]: parent_token["parent_type"] for parent_token in hierarchy_parents } for _index, _parent in enumerate( - shot_hierarchy["parents_path"].split("/") + shot_hierarchy["parents_path"].split("/") ): # format parent token with value which is formatted try: @@ -262,22 +270,22 @@ class ShotMetadataSolver: """ tasks_to_add = {} - project_tasks = project_doc["config"]["tasks"] - for task_name, task_data in self.shot_add_tasks.items(): - _task_data = deepcopy(task_data) + project_task_types = project_doc["config"]["tasks"] + for task_item in self.shot_add_tasks: + task_name = task_item["name"] + task_type = task_item["task_type"] # check if task type in project task types - if _task_data["type"] in project_tasks.keys(): - tasks_to_add[task_name] = _task_data - else: + if task_type not in project_task_types.keys(): raise KeyError( "Missing task type `{}` for `{}` is not" " existing in `{}``".format( - _task_data["type"], + task_type, task_name, - list(project_tasks.keys()) + list(project_task_types.keys()) ) ) + tasks_to_add[task_name] = {"type": task_type} return tasks_to_add diff --git a/client/ayon_core/hosts/traypublisher/api/pipeline.py b/client/ayon_core/hosts/traypublisher/api/pipeline.py index 87177705c9..f4526ddf4b 100644 --- a/client/ayon_core/hosts/traypublisher/api/pipeline.py +++ b/client/ayon_core/hosts/traypublisher/api/pipeline.py @@ -7,7 +7,6 @@ import pyblish.api from ayon_core.pipeline import ( register_creator_plugin_path, - legacy_io, ) from ayon_core.host import HostBase, IPublishHost @@ -23,8 +22,7 @@ 
class TrayPublisherHost(HostBase, IPublishHost): name = "traypublisher" def install(self): - os.environ["AVALON_APP"] = self.name - legacy_io.Session["AVALON_APP"] = self.name + os.environ["AYON_HOST_NAME"] = self.name pyblish.api.register_host("traypublisher") pyblish.api.register_plugin_path(PUBLISH_PATH) @@ -42,9 +40,7 @@ class TrayPublisherHost(HostBase, IPublishHost): def set_project_name(self, project_name): # TODO Deregister project specific plugins and register new project # plugins - os.environ["AVALON_PROJECT"] = project_name - legacy_io.Session["AVALON_PROJECT"] = project_name - legacy_io.install() + os.environ["AYON_PROJECT_NAME"] = project_name HostContext.set_project_name(project_name) diff --git a/client/ayon_core/hosts/traypublisher/api/plugin.py b/client/ayon_core/hosts/traypublisher/api/plugin.py index 77a8f23d2e..be50383510 100644 --- a/client/ayon_core/hosts/traypublisher/api/plugin.py +++ b/client/ayon_core/hosts/traypublisher/api/plugin.py @@ -111,7 +111,7 @@ class SettingsCreator(TrayPublishCreator): extensions = [] - def create(self, subset_name, data, pre_create_data): + def create(self, product_name, data, pre_create_data): # Pass precreate data to creator attributes thumbnail_path = pre_create_data.pop(PRE_CREATE_THUMBNAIL_KEY, None) @@ -119,8 +119,8 @@ class SettingsCreator(TrayPublishCreator): if self.allow_version_control: asset_name = data["folderPath"] subset_docs_by_asset_id = self._prepare_next_versions( - [asset_name], [subset_name]) - version = subset_docs_by_asset_id[asset_name].get(subset_name) + [asset_name], [product_name]) + version = subset_docs_by_asset_id[asset_name].get(product_name) pre_create_data["version_to_use"] = version data["_previous_last_version"] = version @@ -128,38 +128,41 @@ class SettingsCreator(TrayPublishCreator): data["settings_creator"] = True # Create new instance - new_instance = CreatedInstance(self.family, subset_name, data, self) + new_instance = CreatedInstance( + self.product_type, product_name, data, self + ) self._store_new_instance(new_instance) if thumbnail_path: self.set_instance_thumbnail_path(new_instance.id, thumbnail_path) - def _prepare_next_versions(self, asset_names, subset_names): - """Prepare next versions for given asset and subset names. + def _prepare_next_versions(self, asset_names, product_names): + """Prepare next versions for given asset and product names. Todos: - Expect combination of subset names by asset name to avoid - unnecessary server calls for unused subsets. + Expect combination of product names by asset name to avoid + unnecessary server calls for unused products. Args: asset_names (Iterable[str]): Asset names. - subset_names (Iterable[str]): Subset names. + product_names (Iterable[str]): Subset names. Returns: dict[str, dict[str, int]]: Last versions by asset - and subset names. + and product names. 
""" # Prepare all versions for all combinations to '1' + # TODO use 'ayon_core.pipeline.version_start' logic subset_docs_by_asset_id = { asset_name: { - subset_name: 1 - for subset_name in subset_names + product_name: 1 + for product_name in product_names } for asset_name in asset_names } - if not asset_names or not subset_names: + if not asset_names or not product_names: return subset_docs_by_asset_id asset_docs = get_assets( @@ -174,26 +177,26 @@ class SettingsCreator(TrayPublishCreator): subset_docs = list(get_subsets( self.project_name, asset_ids=asset_names_by_id.keys(), - subset_names=subset_names, + subset_names=product_names, fields=["_id", "name", "parent"] )) - subset_ids = {subset_doc["_id"] for subset_doc in subset_docs} + product_ids = {subset_doc["_id"] for subset_doc in subset_docs} last_versions = get_last_versions( self.project_name, - subset_ids, + product_ids, fields=["name", "parent"]) for subset_doc in subset_docs: asset_id = subset_doc["parent"] asset_name = asset_names_by_id[asset_id] - subset_name = subset_doc["name"] - subset_id = subset_doc["_id"] - last_version = last_versions.get(subset_id) + product_name = subset_doc["name"] + product_id = subset_doc["_id"] + last_version = last_versions.get(product_id) version = 0 if last_version is not None: version = last_version["name"] - subset_docs_by_asset_id[asset_name][subset_name] += version + subset_docs_by_asset_id[asset_name][product_name] += version return subset_docs_by_asset_id def _fill_next_versions(self, instances_data): @@ -223,16 +226,16 @@ class SettingsCreator(TrayPublishCreator): instance["folderPath"] for instance in filtered_instance_data } - subset_names = { - instance["subset"] + product_names = { + instance["productName"] for instance in filtered_instance_data} subset_docs_by_asset_id = self._prepare_next_versions( - asset_names, subset_names + asset_names, product_names ) for instance in filtered_instance_data: asset_name = instance["folderPath"] - subset_name = instance["subset"] - version = subset_docs_by_asset_id[asset_name][subset_name] + product_name = instance["productName"] + version = subset_docs_by_asset_id[asset_name][product_name] instance["creator_attributes"]["version_to_use"] = version instance["_previous_last_version"] = version @@ -311,14 +314,14 @@ class SettingsCreator(TrayPublishCreator): @classmethod def from_settings(cls, item_data): identifier = item_data["identifier"] - family = item_data["family"] + product_type = item_data["product_type"] if not identifier: - identifier = "settings_{}".format(family) + identifier = "settings_{}".format(product_type) return type( "{}{}".format(cls.__name__, identifier), (cls, ), { - "family": family, + "product_type": product_type, "identifier": identifier, "label": item_data["label"].strip(), "icon": item_data["icon"], diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_colorspace_look.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_colorspace_look.py index 3969294f1e..5c913b3289 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_colorspace_look.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_colorspace_look.py @@ -23,7 +23,7 @@ class CreateColorspaceLook(TrayPublishCreator): identifier = "io.openpype.creators.traypublisher.colorspace_look" label = "Colorspace Look" - family = "ociolook" + product_type = "ociolook" description = "Publishes color space look file." 
extensions = [".cc", ".cube", ".3dl", ".spi1d", ".spi3d", ".csp", ".lut"] enabled = False @@ -44,7 +44,7 @@ This creator publishes color space look file (LUT). def get_icon(self): return "mdi.format-color-fill" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): repr_file = pre_create_data.get("luts_file") if not repr_file: raise CreatorError("No files specified") @@ -58,11 +58,11 @@ This creator publishes color space look file (LUT). asset_doc = get_asset_by_name( self.project_name, asset_name) - subset_name = self.get_subset_name( - variant=instance_data["variant"], - task_name=instance_data["task"] or "Not set", + product_name = self.get_product_name( project_name=self.project_name, asset_doc=asset_doc, + task_name=instance_data["task"] or "Not set", + variant=instance_data["variant"], ) instance_data["creator_attributes"] = { @@ -71,7 +71,7 @@ This creator publishes color space look file (LUT). } # Create new instance - new_instance = CreatedInstance(self.family, subset_name, + new_instance = CreatedInstance(self.product_type, product_name, instance_data, self) new_instance.transient_data["config_items"] = self.config_items new_instance.transient_data["config_data"] = self.config_data @@ -148,7 +148,7 @@ This creator publishes color space look file (LUT). ) ] - def apply_settings(self, project_settings, system_settings): + def apply_settings(self, project_settings): host = self.create_context.host host_name = host.name project_name = host.get_current_project_name() diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py index 51a67a871e..a7abd3e6db 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py @@ -60,19 +60,15 @@ CLIP_ATTR_DEFS = [ class EditorialClipInstanceCreatorBase(HiddenTrayPublishCreator): - """ Wrapper class for clip family creators - - Args: - HiddenTrayPublishCreator (BaseCreator): hidden supporting class - """ + """Wrapper class for clip product type creators.""" host_name = "traypublisher" def create(self, instance_data, source_data=None): - subset_name = instance_data["subset"] + product_name = instance_data["productName"] # Create new instance new_instance = CreatedInstance( - self.family, subset_name, instance_data, self + self.product_type, product_name, instance_data, self ) self._store_new_instance(new_instance) @@ -90,15 +86,12 @@ class EditorialClipInstanceCreatorBase(HiddenTrayPublishCreator): class EditorialShotInstanceCreator(EditorialClipInstanceCreatorBase): - """ Shot family class + """Shot product type class The shot metadata instance carrier. - - Args: - EditorialClipInstanceCreatorBase (BaseCreator): hidden supporting class """ identifier = "editorial_shot" - family = "shot" + product_type = "shot" label = "Editorial Shot" def get_instance_attr_defs(self): @@ -113,57 +106,48 @@ class EditorialShotInstanceCreator(EditorialClipInstanceCreatorBase): class EditorialPlateInstanceCreator(EditorialClipInstanceCreatorBase): - """ Plate family class + """Plate product type class Plate representation instance. 
- - Args: - EditorialClipInstanceCreatorBase (BaseCreator): hidden supporting class """ identifier = "editorial_plate" - family = "plate" + product_type = "plate" label = "Editorial Plate" class EditorialAudioInstanceCreator(EditorialClipInstanceCreatorBase): - """ Audio family class + """Audio product type class Audio representation instance. - - Args: - EditorialClipInstanceCreatorBase (BaseCreator): hidden supporting class """ identifier = "editorial_audio" - family = "audio" + product_type = "audio" label = "Editorial Audio" class EditorialReviewInstanceCreator(EditorialClipInstanceCreatorBase): - """ Review family class + """Review product type class Review representation instance. - - Args: - EditorialClipInstanceCreatorBase (BaseCreator): hidden supporting class """ identifier = "editorial_review" - family = "review" + product_type = "review" label = "Editorial Review" class EditorialSimpleCreator(TrayPublishCreator): - """ Editorial creator class + """Editorial creator class Simple workflow creator. This creator only disecting input video file into clip chunks and then converts each to - defined format defined Settings for each subset preset. + defined format defined Settings for each product preset. Args: TrayPublishCreator (Creator): Tray publisher plugin class """ label = "Editorial Simple" - family = "editorial" + product_type = "editorial" identifier = "editorial_simple" default_variants = [ "main" @@ -174,46 +158,42 @@ Supporting publishing new shots to project or updating already created. Publishing will create OTIO file. """ icon = "fa.file" + product_type_presets = [] - def __init__( - self, project_settings, *args, **kwargs - ): - super(EditorialSimpleCreator, self).__init__( - project_settings, *args, **kwargs - ) + def __init__(self, *args, **kwargs): + self._shot_metadata_solver = ShotMetadataSolver(self.log) + super(EditorialSimpleCreator, self).__init__(*args, **kwargs) + + def apply_settings(self, project_settings): editorial_creators = deepcopy( project_settings["traypublisher"]["editorial_creators"] ) - # get this creator settings by identifier - self._creator_settings = editorial_creators.get(self.identifier) + creator_settings = editorial_creators.get(self.identifier) - clip_name_tokenizer = self._creator_settings["clip_name_tokenizer"] - shot_rename = self._creator_settings["shot_rename"] - shot_hierarchy = self._creator_settings["shot_hierarchy"] - shot_add_tasks = self._creator_settings["shot_add_tasks"] - - self._shot_metadata_solver = ShotMetadataSolver( - clip_name_tokenizer, - shot_rename, - shot_hierarchy, - shot_add_tasks, - self.log + self._shot_metadata_solver.update_data( + creator_settings["clip_name_tokenizer"], + creator_settings["shot_rename"], + creator_settings["shot_hierarchy"], + creator_settings["shot_add_tasks"] ) + self.product_type_presets = creator_settings["product_type_presets"] + default_variants = creator_settings.get("default_variants") + if default_variants: + self.default_variants = default_variants - # try to set main attributes from settings - if self._creator_settings.get("default_variants"): - self.default_variants = self._creator_settings["default_variants"] - - def create(self, subset_name, instance_data, pre_create_data): - allowed_family_presets = self._get_allowed_family_presets( + def create(self, product_name, instance_data, pre_create_data): + allowed_product_type_presets = self._get_allowed_product_type_presets( pre_create_data) + product_types = { + item["product_type"] + for item in self.product_type_presets + } 
clip_instance_properties = { - k: v for k, v in pre_create_data.items() + k: v + for k, v in pre_create_data.items() if k != "sequence_filepath_data" - if k not in [ - i["family"] for i in self._creator_settings["family_presets"] - ] + if k not in product_types } asset_name = instance_data["folderPath"] @@ -255,7 +235,7 @@ or updating already created. Publishing will create OTIO file. otio_timeline, media_path, clip_instance_properties, - allowed_family_presets, + allowed_product_type_presets, os.path.basename(seq_path), first_otio_timeline ) @@ -266,7 +246,7 @@ or updating already created. Publishing will create OTIO file. # create otio editorial instance self._create_otio_instance( - subset_name, + product_name, instance_data, seq_path, media_path, first_otio_timeline @@ -274,7 +254,7 @@ or updating already created. Publishing will create OTIO file. def _create_otio_instance( self, - subset_name, + product_name, data, sequence_path, media_path, @@ -283,7 +263,7 @@ or updating already created. Publishing will create OTIO file. """Otio instance creating function Args: - subset_name (str): name of subset + product_name (str): Product name. data (dict): instance data sequence_path (str): path to sequence file media_path (str): path to media file @@ -296,7 +276,7 @@ or updating already created. Publishing will create OTIO file. "otioTimeline": otio.adapters.write_to_string(otio_timeline) }) new_instance = CreatedInstance( - self.family, subset_name, data, self + self.product_type, product_name, data, self ) self._store_new_instance(new_instance) @@ -355,7 +335,7 @@ or updating already created. Publishing will create OTIO file. otio_timeline, media_path, instance_data, - family_presets, + product_type_presets, sequence_file_name, first_otio_timeline=None ): @@ -365,7 +345,7 @@ or updating already created. Publishing will create OTIO file. otio_timeline (otio.Timeline): otio timeline object media_path (str): media file path string instance_data (dict): clip instance data - family_presets (list): list of dict settings subset presets + product_type_presets (list): list of dict settings product presets """ tracks = [ @@ -411,17 +391,17 @@ or updating already created. Publishing will create OTIO file. "instance_id": None } - for _fpreset in family_presets: - # exclude audio family if no audio stream + for product_type_preset in product_type_presets: + # exclude audio product type if no audio stream if ( - _fpreset["family"] == "audio" + product_type_preset["product_type"] == "audio" and not media_data.get("audio") ): continue - instance = self._make_subset_instance( + instance = self._make_product_instance( otio_clip, - _fpreset, + product_type_preset, deepcopy(base_instance_data), parenting_data ) @@ -530,33 +510,33 @@ or updating already created. Publishing will create OTIO file. 
return return_data - def _make_subset_instance( + def _make_product_instance( self, otio_clip, - preset, + product_type_preset, instance_data, parenting_data ): - """Making subset instance from input preset + """Making product instance from input preset Args: otio_clip (otio.Clip): otio clip object - preset (dict): single family preset + product_type_preset (dict): single product type preset instance_data (dict): instance data parenting_data (dict): shot instance parent data Returns: CreatedInstance: creator instance object """ - family = preset["family"] - label = self._make_subset_naming( - preset, + product_type = product_type_preset["product_type"] + label = self._make_product_naming( + product_type_preset, instance_data ) instance_data["label"] = label - # add file extension filter only if it is not shot family - if family == "shot": + # add file extension filter only if it is not shot product type + if product_type == "shot": instance_data["otioClip"] = ( otio.adapters.write_to_string(otio_clip)) c_instance = self.create_context.creators[ @@ -569,15 +549,15 @@ or updating already created. Publishing will create OTIO file. else: # add review family if defined instance_data.update({ - "outputFileType": preset["output_file_type"], + "outputFileType": product_type_preset["output_file_type"], "parent_instance_id": parenting_data["instance_id"], "creator_attributes": { "parent_instance": parenting_data["instance_label"], - "add_review_family": preset.get("review") + "add_review_family": product_type_preset.get("review") } }) - creator_identifier = f"editorial_{family}" + creator_identifier = f"editorial_{product_type}" editorial_clip_creator = self.create_context.creators[ creator_identifier] c_instance = editorial_clip_creator.create( @@ -585,15 +565,11 @@ or updating already created. Publishing will create OTIO file. return c_instance - def _make_subset_naming( - self, - preset, - instance_data - ): - """ Subset name maker + def _make_product_naming(self, product_type_preset, instance_data): + """Subset name maker Args: - preset (dict): single preset item + product_type_preset (dict): single preset item instance_data (dict): instance data Returns: @@ -602,25 +578,25 @@ or updating already created. Publishing will create OTIO file. asset_name = instance_data["creator_attributes"]["folderPath"] variant_name = instance_data["variant"] - family = preset["family"] + product_type = product_type_preset["product_type"] # get variant name from preset or from inheritance - _variant_name = preset.get("variant") or variant_name + _variant_name = product_type_preset.get("variant") or variant_name - # subset name - subset_name = "{}{}".format( - family, _variant_name.capitalize() + # product name + product_name = "{}{}".format( + product_type, _variant_name.capitalize() ) label = "{} {}".format( asset_name, - subset_name + product_name ) instance_data.update({ - "family": family, "label": label, "variant": _variant_name, - "subset": subset_name, + "productType": product_type, + "productName": product_name, }) return label @@ -631,7 +607,7 @@ or updating already created. Publishing will create OTIO file. instance_data, track_start_frame, ): - """ Factoring basic set of instance data. + """Factoring basic set of instance data. Args: otio_clip (otio.Clip): otio clip object @@ -763,8 +739,8 @@ or updating already created. Publishing will create OTIO file. "sourceOut": int(source_out) } - def _get_allowed_family_presets(self, pre_create_data): - """ Filter out allowed family presets. 
+ def _get_allowed_product_type_presets(self, pre_create_data): + """Filter out allowed product type presets. Args: pre_create_data (dict): precreate attributes inputs @@ -773,10 +749,11 @@ or updating already created. Publishing will create OTIO file. list: lit of dict with preset items """ return [ - {"family": "shot"}, + {"product_type": "shot"}, *[ - preset for preset in self._creator_settings["family_presets"] - if pre_create_data[preset["family"]] + preset + for preset in self.product_type_presets + if pre_create_data[preset["product_type"]] ] ] @@ -809,7 +786,7 @@ or updating already created. Publishing will create OTIO file. return True def get_pre_create_attr_defs(self): - """ Creating pre-create attributes at creator plugin. + """Creating pre-create attributes at creator plugin. Returns: list: list of attribute object instances @@ -853,8 +830,8 @@ or updating already created. Publishing will create OTIO file. ] # add variants swithers attr_defs.extend( - BoolDef(_var["family"], label=_var["family"]) - for _var in self._creator_settings["family_presets"] + BoolDef(item["product_type"], label=item["product_type"]) + for item in self.product_type_presets ) attr_defs.append(UISeparatorDef()) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_from_settings.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_from_settings.py index 20f8dd792a..fe7ba4c4a4 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_from_settings.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_from_settings.py @@ -8,14 +8,13 @@ log = Logger.get_logger(__name__) def initialize(): from ayon_core.hosts.traypublisher.api.plugin import SettingsCreator - project_name = os.environ["AVALON_PROJECT"] + project_name = os.environ["AYON_PROJECT_NAME"] project_settings = get_project_settings(project_name) simple_creators = project_settings["traypublisher"]["simple_creators"] global_variables = globals() for item in simple_creators: - dynamic_plugin = SettingsCreator.from_settings(item) global_variables[dynamic_plugin.__name__] = dynamic_plugin diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_movie_batch.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_movie_batch.py index 274495855b..9b3dfdd334 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_movie_batch.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_movie_batch.py @@ -11,7 +11,7 @@ from ayon_core.pipeline import ( CreatedInstance, ) from ayon_core.pipeline.create import ( - get_subset_name, + get_product_name, TaskNotSetError, ) @@ -29,7 +29,7 @@ class BatchMovieCreator(TrayPublishCreator): """ identifier = "render_movie_batch" label = "Batch Movies" - family = "render" + product_type = "render" description = "Publish batch of video files" create_allow_context_change = False @@ -48,7 +48,7 @@ class BatchMovieCreator(TrayPublishCreator): def get_icon(self): return "fa.file" - def create(self, subset_name, data, pre_create_data): + def create(self, product_name, data, pre_create_data): file_paths = pre_create_data.get("filepath") if not file_paths: return @@ -62,7 +62,7 @@ class BatchMovieCreator(TrayPublishCreator): asset_doc, version = get_asset_doc_from_file_name( file_name, self.project_name, self.version_regex) - subset_name, task_name = self._get_subset_and_task( + product_name, task_name = self._get_product_and_task( asset_doc, data["variant"], self.project_name) asset_name = get_asset_name_identifier(asset_doc) @@ 
-71,21 +71,22 @@ class BatchMovieCreator(TrayPublishCreator): instance_data["task"] = task_name # Create new instance - new_instance = CreatedInstance(self.family, subset_name, + new_instance = CreatedInstance(self.product_type, product_name, instance_data, self) self._store_new_instance(new_instance) - def _get_subset_and_task(self, asset_doc, variant, project_name): - """Create subset name according to standard template process""" + def _get_product_and_task(self, asset_doc, variant, project_name): + """Create product name according to standard template process""" task_name = self._get_task_name(asset_doc) - + host_name = self.create_context.host_name try: - subset_name = get_subset_name( - self.family, - variant, - task_name, + product_name = get_product_name( + project_name, asset_doc, - project_name + task_name, + host_name, + self.product_type, + variant, ) except TaskNotSetError: # Create instance with fake task @@ -93,15 +94,16 @@ class BatchMovieCreator(TrayPublishCreator): # but user have ability to change it # NOTE: This expect that there is not task 'Undefined' on asset task_name = "Undefined" - subset_name = get_subset_name( - self.family, - variant, - task_name, + product_name = get_product_name( + project_name, asset_doc, - project_name + task_name, + host_name, + self.product_type, + variant, ) - return subset_name, task_name + return product_name, task_name def _get_task_name(self, asset_doc): """Get applicable task from 'asset_doc' """ diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_online.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_online.py index db11d30afe..a25da0bf34 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_online.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_online.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- """Creator of online files. -Online file retain their original name and use it as subset name. To -avoid conflicts, this creator checks if subset with this name already +Online file retain their original name and use it as product name. To +avoid conflicts, this creator checks if product with this name already exists under selected asset. """ from pathlib import Path @@ -21,7 +21,7 @@ class OnlineCreator(TrayPublishCreator): identifier = "io.openpype.creators.traypublisher.online" label = "Online" - family = "online" + product_type = "online" description = "Publish file retaining its original file name" extensions = [".mov", ".mp4", ".mxf", ".m4v", ".mpg", ".exr", ".dpx", ".tif", ".png", ".jpg"] @@ -30,7 +30,7 @@ class OnlineCreator(TrayPublishCreator): return """# Create file retaining its original file name. This will publish files using template helping to retain original - file name and that file name is used as subset name. + file name and that file name is used as product name. 
Bz default it tries to guard against multiple publishes of the same file.""" @@ -38,7 +38,7 @@ class OnlineCreator(TrayPublishCreator): def get_icon(self): return "fa.file" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): repr_file = pre_create_data.get("representation_file") if not repr_file: raise CreatorError("No files specified") @@ -50,27 +50,27 @@ class OnlineCreator(TrayPublishCreator): origin_basename = Path(files[0]).stem - # disable check for existing subset with the same name + # disable check for existing product with the same name """ asset = get_asset_by_name( - self.project_name, instance_data["asset"], fields=["_id"]) + self.project_name, instance_data["folderPath"], fields=["_id"]) if get_subset_by_name( self.project_name, origin_basename, asset["_id"], fields=["_id"]): - raise CreatorError(f"subset with {origin_basename} already " + raise CreatorError(f"product with {origin_basename} already " "exists in selected asset") """ instance_data["originalBasename"] = origin_basename - subset_name = origin_basename + product_name = origin_basename instance_data["creator_attributes"] = { "path": (Path(repr_file["directory"]) / files[0]).as_posix() } # Create new instance - new_instance = CreatedInstance(self.family, subset_name, + new_instance = CreatedInstance(self.product_type, product_name, instance_data, self) self._store_new_instance(new_instance) @@ -100,16 +100,16 @@ class OnlineCreator(TrayPublishCreator): ) ] - def get_subset_name( + def get_product_name( self, - variant, - task_name, - asset_doc, project_name, + asset_doc, + task_name, + variant, host_name=None, instance=None ): if instance is None: return "{originalBasename}" - return instance.data["subset"] + return instance.data["productName"] diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_shot_instances.py b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_shot_instances.py index b19eb36168..d489528c57 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_shot_instances.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_shot_instances.py @@ -17,7 +17,7 @@ class CollectShotInstance(pyblish.api.InstancePlugin): families = ["shot"] SHARED_KEYS = [ - "asset", + "folderPath", "fps", "handleStart", "handleEnd", @@ -132,7 +132,7 @@ class CollectShotInstance(pyblish.api.InstancePlugin): "sourceIn": _cr_attrs["sourceIn"], "sourceOut": _cr_attrs["sourceOut"], "workfileFrameStart": workfile_start_frame, - "asset": _cr_attrs["folderPath"], + "folderPath": _cr_attrs["folderPath"], } def _solve_hierarchy_context(self, instance): @@ -170,7 +170,7 @@ class CollectShotInstance(pyblish.api.InstancePlugin): parents = instance.data.get('parents', []) # Split by '/' for AYON where asset is a path - asset_name = instance.data["asset"].split("/")[-1] + asset_name = instance.data["folderPath"].split("/")[-1] actual = {asset_name: in_info} for parent in reversed(parents): diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/help/validate_existing_version.xml b/client/ayon_core/hosts/traypublisher/plugins/publish/help/validate_existing_version.xml index 8a3b8f4d7d..726ccdffe3 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/help/validate_existing_version.xml +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/help/validate_existing_version.xml @@ -5,7 +5,7 @@ ## Version already exists -Version {version} you have set on instance '{subset_name}' under 
'{asset_name}' already exists. This validation is enabled by default to prevent accidental override of existing versions. +Version {version} you have set on instance '{product_name}' under '{asset_name}' already exists. This validation is enabled by default to prevent accidental override of existing versions. ### How to repair? - Click on 'Repair' action -> this will change version to next available. diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py index 6a85f92ce1..ddfe8904fa 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py @@ -34,13 +34,13 @@ class ValidateExistingVersion( if last_version is None or last_version < version: return - subset_name = instance.data["subset"] - msg = "Version {} already exists for subset {}.".format( - version, subset_name) + product_name = instance.data["productName"] + msg = "Version {} already exists for product {}.".format( + version, product_name) formatting_data = { - "subset_name": subset_name, - "asset_name": instance.data["asset"], + "product_name": product_name, + "asset_name": instance.data["folderPath"], "version": version } raise PublishXmlValidationError( diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_filepaths.py b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_filepaths.py index 4a4f3dae69..c673b1977b 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_filepaths.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_filepaths.py @@ -28,14 +28,14 @@ class ValidateFilePath(pyblish.api.InstancePlugin): )) return - family = instance.data["family"] + product_type = instance.data["productType"] label = instance.data["name"] filepaths = instance.data["sourceFilepaths"] if not filepaths: raise PublishValidationError( ( "Source filepaths of '{}' instance \"{}\" are not filled" - ).format(family, label), + ).format(product_type, label), "File not filled", ( "## Files were not filled" @@ -59,7 +59,7 @@ class ValidateFilePath(pyblish.api.InstancePlugin): raise PublishValidationError( ( "Filepath of '{}' instance \"{}\" does not exist:\n{}" - ).format(family, label, joined_paths), + ).format(product_type, label, joined_paths), "File not found", ( "## Files were not found\nFiles\n{}" diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_online_file.py b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_online_file.py index e655578095..3bd55342af 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_online_file.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_online_file.py @@ -11,7 +11,7 @@ from ayon_core.client import get_subset_by_name class ValidateOnlineFile(OptionalPyblishPluginMixin, pyblish.api.InstancePlugin): - """Validate that subset doesn't exist yet.""" + """Validate that product doesn't exist yet.""" label = "Validate Existing Online Files" hosts = ["traypublisher"] families = ["online"] @@ -24,10 +24,10 @@ class ValidateOnlineFile(OptionalPyblishPluginMixin, return project_name = instance.context.data["projectName"] asset_id = instance.data["assetEntity"]["_id"] - subset = get_subset_by_name( - project_name, instance.data["subset"], asset_id) + subset_doc = get_subset_by_name( + project_name, instance.data["productName"], asset_id) 
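The hunks above (and most of the ones that follow) repeatedly swap the legacy instance-data keys for their AYON counterparts. A minimal sketch of that mapping, using a hypothetical `upgrade_instance_data` helper that is not part of ayon_core, only to make the rename explicit:

```python
# Key renames applied throughout these plugins (legacy -> AYON).
LEGACY_TO_AYON_KEYS = {
    "asset": "folderPath",
    "subset": "productName",
    "family": "productType",
}


def upgrade_instance_data(instance_data):
    """Return a copy of instance data using the AYON key names."""
    upgraded = dict(instance_data)
    for old_key, new_key in LEGACY_TO_AYON_KEYS.items():
        if old_key in upgraded and new_key not in upgraded:
            upgraded[new_key] = upgraded.pop(old_key)
    return upgraded


print(upgrade_instance_data(
    {"asset": "sh010", "subset": "renderMain", "family": "render"}
))
# {'folderPath': 'sh010', 'productName': 'renderMain', 'productType': 'render'}
```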
- if subset: + if subset_doc: raise PublishValidationError( "Subset to be published already exists.", title=self.label diff --git a/client/ayon_core/hosts/tvpaint/addon.py b/client/ayon_core/hosts/tvpaint/addon.py index 375f7266ae..6756b274f9 100644 --- a/client/ayon_core/hosts/tvpaint/addon.py +++ b/client/ayon_core/hosts/tvpaint/addon.py @@ -1,5 +1,5 @@ import os -from ayon_core.modules import OpenPypeModule, IHostAddon +from ayon_core.addon import AYONAddon, IHostAddon TVPAINT_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -12,13 +12,10 @@ def get_launch_script_path(): ) -class TVPaintAddon(OpenPypeModule, IHostAddon): +class TVPaintAddon(AYONAddon, IHostAddon): name = "tvpaint" host_name = "tvpaint" - def initialize(self, module_settings): - self.enabled = True - def add_implementation_envs(self, env, _app): """Modify environments to contain all required for implementation.""" diff --git a/client/ayon_core/hosts/tvpaint/api/pipeline.py b/client/ayon_core/hosts/tvpaint/api/pipeline.py index 1360b423b3..1b0227e89c 100644 --- a/client/ayon_core/hosts/tvpaint/api/pipeline.py +++ b/client/ayon_core/hosts/tvpaint/api/pipeline.py @@ -13,7 +13,6 @@ from ayon_core.hosts.tvpaint import TVPAINT_ROOT_DIR from ayon_core.settings import get_current_project_settings from ayon_core.lib import register_event_callback from ayon_core.pipeline import ( - legacy_io, register_loader_plugin_path, register_creator_plugin_path, AVALON_CONTAINER_ID, @@ -53,7 +52,7 @@ expected that there are also keys `["instances0", "instances1"]`. Workfile data looks like: ``` [avalon] -instances0=[{{__dq__}id{__dq__}: {__dq__}pyblish.avalon.instance{__dq__... +instances0=[{{__dq__}id{__dq__}: {__dq__}ayon.create.instance{__dq__... instances1=...more data... instances=2 ``` @@ -66,11 +65,10 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): def install(self): """Install TVPaint-specific functionality.""" - log.info("OpenPype - Installing TVPaint integration") - legacy_io.install() + log.info("AYON - Installing TVPaint integration") # Create workdir folder if does not exist yet - workdir = legacy_io.Session["AVALON_WORKDIR"] + workdir = os.getenv("AYON_WORKDIR") if not os.path.exists(workdir): os.makedirs(workdir) @@ -101,7 +99,7 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): Union[str, None]: Current asset name. """ - return self.get_current_context().get("asset_name") + return self.get_current_context().get("folder_path") def get_current_task_name(self): """ @@ -117,11 +115,13 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): return get_global_context() if "project_name" in context: + if "asset_name" in context: + context["folder_path"] = context["asset_name"] return context # This is legacy way how context was stored return { "project_name": context.get("project"), - "asset_name": context.get("asset"), + "folder_path": context.get("asset"), "task_name": context.get("task") } @@ -157,7 +157,7 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): return execute_george(george_script) def work_root(self, session): - return session["AVALON_WORKDIR"] + return session["AYON_WORKDIR"] def get_current_workfile(self): return execute_george("tv_GetProjectName") @@ -176,7 +176,7 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): # Setup project settings if its the template that's launched. 
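A condensed, self-contained sketch of the context normalization the TVPaint host hunk above introduces: newer workfile metadata may still carry an `asset_name` key, and very old workfiles stored short `project`/`asset`/`task` keys; both are mapped onto `folder_path`. The function name is illustrative only.

```python
def normalize_workfile_context(context):
    """Map legacy workfile context keys to the AYON naming."""
    if "project_name" in context:
        if "asset_name" in context:
            context["folder_path"] = context["asset_name"]
        return context
    # Legacy storage used short keys.
    return {
        "project_name": context.get("project"),
        "folder_path": context.get("asset"),
        "task_name": context.get("task"),
    }


print(normalize_workfile_context({"project": "demo", "asset": "sh010", "task": "paint"}))
# {'project_name': 'demo', 'folder_path': 'sh010', 'task_name': 'paint'}
```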
# TODO also check for template creation when it's possible to define # templates - last_workfile = os.environ.get("AVALON_LAST_WORKFILE") + last_workfile = os.environ.get("AYON_LAST_WORKFILE") if not last_workfile or os.path.exists(last_workfile): return diff --git a/client/ayon_core/hosts/tvpaint/api/plugin.py b/client/ayon_core/hosts/tvpaint/api/plugin.py index 88a0e74528..ef9f82b783 100644 --- a/client/ayon_core/hosts/tvpaint/api/plugin.py +++ b/client/ayon_core/hosts/tvpaint/api/plugin.py @@ -3,7 +3,7 @@ import re from ayon_core.pipeline import LoaderPlugin from ayon_core.pipeline.create import ( CreatedInstance, - get_subset_name, + get_product_name, AutoCreator, Creator, ) @@ -17,8 +17,8 @@ SHARED_DATA_KEY = "openpype.tvpaint.instances" class TVPaintCreatorCommon: @property - def subset_template_family_filter(self): - return self.family + def product_template_product_type(self): + return self.product_type def _cache_and_get_instances(self): return cache_and_get_instances( @@ -53,29 +53,29 @@ class TVPaintCreatorCommon: cur_instance_data.update(instance_data) self.host.write_instances(cur_instances) - def _custom_get_subset_name( + def _custom_get_product_name( self, - variant, - task_name, - asset_doc, project_name, + asset_doc, + task_name, + variant, host_name=None, instance=None ): dynamic_data = self.get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name, instance + project_name, asset_doc, task_name, variant, host_name, instance ) - return get_subset_name( - self.family, - variant, - task_name, - asset_doc, + return get_product_name( project_name, + asset_doc, + task_name, host_name, + self.product_type, + variant, dynamic_data=dynamic_data, project_settings=self.project_settings, - family_filter=self.subset_template_family_filter + product_type_filter=self.product_template_product_type ) @@ -118,8 +118,8 @@ class TVPaintCreator(Creator, TVPaintCreatorCommon): output["task"] = task_name return output - def get_subset_name(self, *args, **kwargs): - return self._custom_get_subset_name(*args, **kwargs) + def get_product_name(self, *args, **kwargs): + return self._custom_get_product_name(*args, **kwargs) def _store_new_instance(self, new_instance): instances_data = self.host.list_instances() @@ -135,8 +135,8 @@ class TVPaintAutoCreator(AutoCreator, TVPaintCreatorCommon): def update_instances(self, update_list): self._update_create_instances(update_list) - def get_subset_name(self, *args, **kwargs): - return self._custom_get_subset_name(*args, **kwargs) + def get_product_name(self, *args, **kwargs): + return self._custom_get_product_name(*args, **kwargs) class Loader(LoaderPlugin): @@ -161,8 +161,8 @@ class Loader(LoaderPlugin): `0` is used ase base. Args: - asset_name (str): Name of subset's parent asset document. - name (str): Name of loaded subset. + asset_name (str): Name of product's parent asset document. + name (str): Name of loaded product. 
Returns: (str): `{asset_name}_{name}_{higher suffix + 1}` diff --git a/client/ayon_core/hosts/tvpaint/plugins/create/convert_legacy.py b/client/ayon_core/hosts/tvpaint/plugins/create/convert_legacy.py index d3c6c06c8a..1415adac2b 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/create/convert_legacy.py +++ b/client/ayon_core/hosts/tvpaint/plugins/create/convert_legacy.py @@ -101,7 +101,7 @@ class TVPaintLegacyConverted(SubsetConvertorPlugin): render_layer["creator_attributes"] = { "group_id": group_id } - render_layer["family"] = "render" + render_layer["productType"] = "render" group = groups_by_id[group_id] # Use group name for variant group["variant"] = group["name"] @@ -128,7 +128,7 @@ class TVPaintLegacyConverted(SubsetConvertorPlugin): render_pass["creator_identifier"] = "render.pass" render_pass["instance_id"] = render_pass.pop("uuid") - render_pass["family"] = "render" + render_pass["productType"] = "render" render_pass["creator_attributes"] = { "render_layer_instance_id": render_layer["instance_id"] diff --git a/client/ayon_core/hosts/tvpaint/plugins/create/create_render.py b/client/ayon_core/hosts/tvpaint/plugins/create/create_render.py index 7d908e8018..dc53ccb9ca 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/create/create_render.py +++ b/client/ayon_core/hosts/tvpaint/plugins/create/create_render.py @@ -118,8 +118,8 @@ class CreateRenderlayer(TVPaintCreator): """ label = "Render Layer" - family = "render" - subset_template_family_filter = "renderLayer" + product_type = "render" + product_template_product_type = "renderLayer" identifier = "render.layer" icon = "fa5.images" @@ -149,10 +149,10 @@ class CreateRenderlayer(TVPaintCreator): self.mark_for_review = plugin_settings["mark_for_review"] def get_dynamic_data( - self, variant, task_name, asset_doc, project_name, host_name, instance + self, project_name, asset_doc, task_name, variant, host_name, instance ): dynamic_data = super().get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name, instance + project_name, asset_doc, task_name, variant, host_name, instance ) dynamic_data["renderpass"] = self.default_pass_name dynamic_data["renderlayer"] = variant @@ -165,7 +165,7 @@ class CreateRenderlayer(TVPaintCreator): if layer["selected"] } - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): self.log.debug("Query data from workfile.") group_name = instance_data["variant"] @@ -195,7 +195,7 @@ class CreateRenderlayer(TVPaintCreator): ): raise CreatorError(( f"Group \"{group_item.get('name')}\" is already used" - f" by another render layer \"{instance['subset']}\"" + f" by another render layer \"{instance['productName']}\"" )) self.log.debug(f"Selected group id is \"{group_id}\".") @@ -208,10 +208,10 @@ class CreateRenderlayer(TVPaintCreator): creator_attributes["group_id"] = group_id creator_attributes["mark_for_review"] = mark_for_review - self.log.info(f"Subset name is {subset_name}") + self.log.info(f"Subset name is {product_name}") new_instance = CreatedInstance( - self.family, - subset_name, + self.product_type, + product_name, instance_data, self ) @@ -374,8 +374,8 @@ class CreateRenderlayer(TVPaintCreator): class CreateRenderPass(TVPaintCreator): - family = "render" - subset_template_family_filter = "renderPass" + product_type = "render" + product_template_product_type = "renderPass" identifier = "render.pass" label = "Render Pass" icon = "fa5.image" @@ -425,10 +425,10 @@ class CreateRenderPass(TVPaintCreator): 
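The creator call sites in this patch also reorder the positional arguments: the old `(variant, task_name, asset_doc, project_name, ...)` order becomes `(project_name, asset_doc, task_name, variant, ...)`. A tiny wrapper that documents the new order as used by these call sites; `creator` stands for any Creator instance and the wrapper itself is hypothetical.

```python
def resolve_product_name(creator, project_name, asset_doc, task_name,
                         variant, host_name=None, instance=None):
    """Call Creator.get_product_name with the new argument order."""
    return creator.get_product_name(
        project_name,
        asset_doc,
        task_name,
        variant,
        host_name=host_name,
        instance=instance,
    )
```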
self._add_instance_to_context(instance) def get_dynamic_data( - self, variant, task_name, asset_doc, project_name, host_name, instance + self, project_name, asset_doc, task_name, variant, host_name, instance ): dynamic_data = super().get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name, instance + project_name, asset_doc, task_name, variant, host_name, instance ) dynamic_data["renderpass"] = variant dynamic_data["renderlayer"] = "{renderlayer}" @@ -447,7 +447,7 @@ class CreateRenderPass(TVPaintCreator): "renderlayer": render_layer_variant }) try: - new_label = instance["subset"].format(**render_layer_data) + new_label = instance["productName"].format(**render_layer_data) except (KeyError, ValueError): pass @@ -457,7 +457,7 @@ class CreateRenderPass(TVPaintCreator): instance["group"] = new_group return old_group != new_group or old_label != new_label - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): render_layer_instance_id = pre_create_data.get( "render_layer_instance_id" ) @@ -523,18 +523,18 @@ class CreateRenderPass(TVPaintCreator): instances_to_remove.append(instance) render_layer = render_layer_instance["variant"] - subset_name_fill_data = {"renderlayer": render_layer} + product_name_fill_data = {"renderlayer": render_layer} - # Format dynamic keys in subset name - label = subset_name + # Format dynamic keys in product name + label = product_name try: label = label.format( - **prepare_template_data(subset_name_fill_data) + **prepare_template_data(product_name_fill_data) ) except (KeyError, ValueError): pass - self.log.info(f"New subset name is \"{label}\".") + self.log.info(f"New product name is \"{label}\".") instance_data["label"] = label instance_data["group"] = f"{self.get_group_label()} ({render_layer})" instance_data["layer_names"] = list(marked_layer_names) @@ -551,8 +551,8 @@ class CreateRenderPass(TVPaintCreator): ) new_instance = CreatedInstance( - self.family, - subset_name, + self.product_type, + product_name, instance_data, self ) @@ -608,7 +608,7 @@ class CreateRenderPass(TVPaintCreator): render_layers = [ { "value": inst["instance_id"], - "label": inst["subset"] + "label": inst["productName"] } for inst in current_instances if inst.get("creator_identifier") == CreateRenderlayer.identifier @@ -674,7 +674,7 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): Never will have any instances, all instances belong to different creators. 
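The render-pass creators above format a `{renderlayer}` placeholder into the product name and label. A self-contained sketch of that step; the real code builds the fill data with ayon_core's `prepare_template_data` helper, replaced here by a plain dict, and the sample product name is made up for illustration.

```python
def fill_product_name(product_name, render_layer_variant):
    """Fill the "{renderlayer}" placeholder in a product name."""
    fill_data = {"renderlayer": render_layer_variant}
    try:
        return product_name.format(**fill_data)
    except (KeyError, ValueError):
        # Unknown placeholders leave the name untouched, mirroring the plugin.
        return product_name


print(fill_product_name("render{renderlayer}Main", "L010"))  # renderL010Main
```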
""" - family = "render" + product_type = "render" label = "Render Layer/Passes" identifier = "render.auto.detect.creator" order = CreateRenderPass.order + 10 @@ -777,31 +777,31 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): self.create_context.creators[CreateRenderlayer.identifier] ) - subset_name: str = creator.get_subset_name( - variant, - task_name, - asset_doc, + product_name: str = creator.get_product_name( project_name, + asset_doc, + task_name, + variant, host_name=self.create_context.host_name, ) asset_name = get_asset_name_identifier(asset_doc) if existing_instance is not None: existing_instance["folderPath"] = asset_name existing_instance["task"] = task_name - existing_instance["subset"] = subset_name + existing_instance["productName"] = product_name return existing_instance instance_data: dict[str, str] = { "folderPath": asset_name, "task": task_name, - "family": creator.family, + "productType": creator.product_type, "variant": variant, } pre_create_data: dict[str, str] = { "group_id": group_id, "mark_for_review": mark_for_review } - return creator.create(subset_name, instance_data, pre_create_data) + return creator.create(product_name, instance_data, pre_create_data) def _prepare_render_passes( self, @@ -831,11 +831,11 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): if (render_pass["layer_names"]) > 1: variant = render_pass["variant"] - subset_name = creator.get_subset_name( - variant, - task_name, - asset_doc, + product_name = creator.get_product_name( project_name, + asset_doc, + task_name, + variant, host_name=self.create_context.host_name, instance=render_pass ) @@ -843,13 +843,13 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): if render_pass is not None: render_pass["folderPath"] = asset_name render_pass["task"] = task_name - render_pass["subset"] = subset_name + render_pass["productName"] = product_name continue instance_data: dict[str, str] = { "folderPath": asset_name, "task": task_name, - "family": creator.family, + "productType": creator.product_type, "variant": variant } @@ -858,7 +858,7 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): "layer_names": [layer_name], "mark_for_review": mark_for_review } - creator.create(subset_name, instance_data, pre_create_data) + creator.create(product_name, instance_data, pre_create_data) def _filter_groups( self, @@ -884,7 +884,7 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): new_groups_order.append(group_id) return new_groups_order - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): project_name: str = self.create_context.get_current_project_name() asset_name: str = instance_data["folderPath"] task_name: str = instance_data["task"] @@ -1023,8 +1023,8 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): class TVPaintSceneRenderCreator(TVPaintAutoCreator): - family = "render" - subset_template_family_filter = "renderScene" + product_type = "render" + product_template_product_type = "renderScene" identifier = "render.scene" label = "Scene Render" icon = "fa.file-image-o" @@ -1044,8 +1044,23 @@ class TVPaintSceneRenderCreator(TVPaintAutoCreator): self.active_on_create = plugin_settings["active_on_create"] self.default_pass_name = plugin_settings["default_pass_name"] - def get_dynamic_data(self, variant, *args, **kwargs): - dynamic_data = super().get_dynamic_data(variant, *args, **kwargs) + def get_dynamic_data( + self, + project_name, + asset_doc, + task_name, + variant, + host_name, + instance + ): + 
dynamic_data = super().get_dynamic_data( + project_name, + asset_doc, + task_name, + variant, + host_name, + instance + ) dynamic_data["renderpass"] = "{renderpass}" dynamic_data["renderlayer"] = variant return dynamic_data @@ -1058,12 +1073,12 @@ class TVPaintSceneRenderCreator(TVPaintAutoCreator): task_name = create_context.get_current_task_name() asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - self.default_variant, - task_name, - asset_doc, + product_name = self.get_product_name( project_name, - host_name + asset_doc, + task_name, + self.default_variant, + host_name, ) data = { "folderPath": asset_name, @@ -1074,7 +1089,7 @@ class TVPaintSceneRenderCreator(TVPaintAutoCreator): "mark_for_review": True }, "label": self._get_label( - subset_name, + product_name, self.default_pass_name ) } @@ -1082,7 +1097,7 @@ class TVPaintSceneRenderCreator(TVPaintAutoCreator): data["active"] = False new_instance = CreatedInstance( - self.family, subset_name, data, self + self.product_type, product_name, data, self ) instances_data = self.host.list_instances() instances_data.append(new_instance.data_to_store()) @@ -1112,32 +1127,32 @@ class TVPaintSceneRenderCreator(TVPaintAutoCreator): or existing_instance["task"] != task_name ): asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - existing_instance["variant"], - task_name, - asset_doc, + product_name = self.get_product_name( project_name, + asset_doc, + task_name, + existing_instance["variant"], host_name, existing_instance ) existing_instance["folderPath"] = asset_name existing_instance["task"] = task_name - existing_instance["subset"] = subset_name + existing_instance["productName"] = product_name existing_instance["label"] = self._get_label( - existing_instance["subset"], + existing_instance["productName"], existing_instance["creator_attributes"]["render_pass_name"] ) - def _get_label(self, subset_name, render_pass_name): + def _get_label(self, product_name, render_pass_name): try: - subset_name = subset_name.format(**prepare_template_data({ + product_name = product_name.format(**prepare_template_data({ "renderpass": render_pass_name })) except (KeyError, ValueError): pass - return subset_name + return product_name def get_instance_attr_defs(self): return [ diff --git a/client/ayon_core/hosts/tvpaint/plugins/create/create_review.py b/client/ayon_core/hosts/tvpaint/plugins/create/create_review.py index 773b85c1f5..1837726cab 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/create/create_review.py +++ b/client/ayon_core/hosts/tvpaint/plugins/create/create_review.py @@ -4,7 +4,7 @@ from ayon_core.hosts.tvpaint.api.plugin import TVPaintAutoCreator class TVPaintReviewCreator(TVPaintAutoCreator): - family = "review" + product_type = "review" identifier = "scene.review" label = "Review" icon = "ei.video" @@ -40,11 +40,11 @@ class TVPaintReviewCreator(TVPaintAutoCreator): if existing_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - self.default_variant, - task_name, - asset_doc, + product_name = self.get_product_name( project_name, + asset_doc, + task_name, + self.default_variant, host_name ) data = { @@ -57,7 +57,7 @@ class TVPaintReviewCreator(TVPaintAutoCreator): data["active"] = False new_instance = CreatedInstance( - self.family, subset_name, data, self + self.product_type, product_name, data, self ) instances_data = self.host.list_instances() instances_data.append(new_instance.data_to_store()) @@ 
-69,14 +69,14 @@ class TVPaintReviewCreator(TVPaintAutoCreator): or existing_instance["task"] != task_name ): asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - existing_instance["variant"], - task_name, - asset_doc, + product_name = self.get_product_name( project_name, + asset_doc, + task_name, + existing_instance["variant"], host_name, existing_instance ) existing_instance["folderPath"] = asset_name existing_instance["task"] = task_name - existing_instance["subset"] = subset_name + existing_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/tvpaint/plugins/create/create_workfile.py b/client/ayon_core/hosts/tvpaint/plugins/create/create_workfile.py index f0d1c7bae6..14a11750a5 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/tvpaint/plugins/create/create_workfile.py @@ -4,7 +4,7 @@ from ayon_core.hosts.tvpaint.api.plugin import TVPaintAutoCreator class TVPaintWorkfileCreator(TVPaintAutoCreator): - family = "workfile" + product_type = "workfile" identifier = "workfile" label = "Workfile" icon = "fa.file-o" @@ -36,11 +36,11 @@ class TVPaintWorkfileCreator(TVPaintAutoCreator): if existing_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - self.default_variant, - task_name, - asset_doc, + product_name = self.get_product_name( project_name, + asset_doc, + task_name, + self.default_variant, host_name ) data = { @@ -50,7 +50,7 @@ class TVPaintWorkfileCreator(TVPaintAutoCreator): } new_instance = CreatedInstance( - self.family, subset_name, data, self + self.product_type, product_name, data, self ) instances_data = self.host.list_instances() instances_data.append(new_instance.data_to_store()) @@ -62,14 +62,14 @@ class TVPaintWorkfileCreator(TVPaintAutoCreator): or existing_instance["task"] != task_name ): asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - existing_instance["variant"], - task_name, - asset_doc, + product_name = self.get_product_name( project_name, + asset_doc, + task_name, + existing_instance["variant"], host_name, existing_instance ) existing_instance["folderPath"] = asset_name existing_instance["task"] = task_name - existing_instance["subset"] = subset_name + existing_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/tvpaint/plugins/load/load_reference_image.py b/client/ayon_core/hosts/tvpaint/plugins/load/load_reference_image.py index 0a12e93f44..5e629f7b7f 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/load/load_reference_image.py +++ b/client/ayon_core/hosts/tvpaint/plugins/load/load_reference_image.py @@ -83,8 +83,8 @@ class LoadImage(plugin.Loader): # Prepare layer name asset_name = context["asset"]["name"] - subset_name = context["subset"]["name"] - layer_name = self.get_unique_layer_name(asset_name, subset_name) + product_name = context["subset"]["name"] + layer_name = self.get_unique_layer_name(asset_name, product_name) path = self.filepath_from_context(context) @@ -210,15 +210,17 @@ class LoadImage(plugin.Loader): def switch(self, container, representation): self.update(container, representation) - def update(self, container, representation): + def update(self, container, context): """Replace container with different version. New layers are loaded as first step. Then is tried to change data in new layers with data from old layers. When that is done old layers are removed. 
""" + + repre_doc = context["representation"] # Create new containers first - context = get_representation_context(representation) + context = get_representation_context(repre_doc) # Get layer ids from previous container old_layer_names = self.get_members_from_container(container) diff --git a/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py b/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py index e29ecfd442..49ef9fc37b 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py +++ b/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py @@ -58,7 +58,7 @@ class LoadWorkfile(plugin.Loader): if not asset_name: context = get_current_context() project_name = context["project_name"] - asset_name = context["asset_name"] + asset_name = context["folder_path"] task_name = context["task_name"] template_key = get_workfile_template_key_from_context( @@ -101,7 +101,7 @@ class LoadWorkfile(plugin.Loader): "tvpaint", task_name=task_name, task_type=data["task"]["type"], - family="workfile" + product_type="workfile" ) else: version += 1 diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/collect_instance_frames.py b/client/ayon_core/hosts/tvpaint/plugins/publish/collect_instance_frames.py index 63f04cf3ce..e7b7b2cad1 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/collect_instance_frames.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/collect_instance_frames.py @@ -31,6 +31,6 @@ class CollectOutputFrameRange(pyblish.api.InstancePlugin): instance.data["frameEnd"] = frame_end self.log.info( "Set frames {}-{} on instance {} ".format( - frame_start, frame_end, instance.data["subset"] + frame_start, frame_end, instance.data["productName"] ) ) diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/collect_render_instances.py b/client/ayon_core/hosts/tvpaint/plugins/publish/collect_render_instances.py index 029c4b7e18..596d257f22 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/collect_render_instances.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/collect_render_instances.py @@ -28,10 +28,10 @@ class CollectRenderInstances(pyblish.api.InstancePlugin): self._collect_data_for_review(instance) return - subset_name = instance.data["subset"] - instance.data["name"] = subset_name + product_name = instance.data["productName"] + instance.data["name"] = product_name instance.data["label"] = "{} [{}-{}]".format( - subset_name, + product_name, context.data["sceneMarkIn"] + 1, context.data["sceneMarkOut"] + 1 ) @@ -84,8 +84,8 @@ class CollectRenderInstances(pyblish.api.InstancePlugin): if render_layer_data is None: return render_layer_name = render_layer_data["variant"] - subset_name = instance.data["subset"] - instance.data["subset"] = subset_name.format( + product_name = instance.data["productName"] + instance.data["productName"] = product_name.format( **prepare_template_data({"renderlayer": render_layer_name}) ) @@ -103,8 +103,8 @@ class CollectRenderInstances(pyblish.api.InstancePlugin): render_pass_name = ( instance.data["creator_attributes"]["render_pass_name"] ) - subset_name = instance.data["subset"] - instance.data["subset"] = subset_name.format( + product_name = instance.data["productName"] + instance.data["productName"] = product_name.format( **prepare_template_data({"renderpass": render_pass_name}) ) diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/collect_workfile.py b/client/ayon_core/hosts/tvpaint/plugins/publish/collect_workfile.py index a3449663f8..a9e9db3872 100644 --- 
a/client/ayon_core/hosts/tvpaint/plugins/publish/collect_workfile.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/collect_workfile.py @@ -14,7 +14,7 @@ class CollectWorkfile(pyblish.api.InstancePlugin): current_file = context.data["currentFile"] self.log.info( - "Workfile path used for workfile family: {}".format(current_file) + "Workfile path used for workfile product: {}".format(current_file) ) dirpath, filename = os.path.split(current_file) diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/collect_workfile_data.py b/client/ayon_core/hosts/tvpaint/plugins/publish/collect_workfile_data.py index 9fbf67863a..414b09c123 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/collect_workfile_data.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/collect_workfile_data.py @@ -4,7 +4,6 @@ import tempfile import pyblish.api -from ayon_core.pipeline import legacy_io from ayon_core.hosts.tvpaint.api.lib import ( execute_george, execute_george_through_file, @@ -66,7 +65,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): # Collect and store current context to have reference current_context = { "project_name": context.data["projectName"], - "asset_name": context.data["asset"], + "folder_path": context.data["folderPath"], "task_name": context.data["task"] } self.log.debug("Current context is: {}".format(current_context)) @@ -78,7 +77,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): if "project" in workfile_context: workfile_context = { "project_name": workfile_context.get("project"), - "asset_name": workfile_context.get("asset"), + "folder_path": workfile_context.get("asset"), "task_name": workfile_context.get("task"), } # Store workfile context to pyblish context @@ -86,19 +85,18 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): if workfile_context: # Change current context with context from workfile key_map = ( - ("AVALON_ASSET", "asset_name"), - ("AVALON_TASK", "task_name") + ("AYON_FOLDER_PATH", "folder_path"), + ("AYON_TASK_NAME", "task_name") ) for env_key, key in key_map: - legacy_io.Session[env_key] = workfile_context[key] os.environ[env_key] = workfile_context[key] self.log.info("Context changed to: {}".format(workfile_context)) - asset_name = workfile_context["asset_name"] + asset_name = workfile_context["folder_path"] task_name = workfile_context["task_name"] else: - asset_name = current_context["asset_name"] + asset_name = current_context["folder_path"] task_name = current_context["task_name"] # Handle older workfiles or workfiles without metadata self.log.warning(( @@ -107,7 +105,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): )) # Store context asset name - context.data["asset"] = asset_name + context.data["folderPath"] = asset_name context.data["task"] = task_name self.log.info( "Context is set to Asset: \"{}\" and Task: \"{}\"".format( diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py b/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py index 6d54d8ec32..ab30e3dc10 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -31,7 +31,7 @@ class ExtractSequence(pyblish.api.Extractor): families = ["review", "render"] # Modifiable with settings - review_bg = [255, 255, 255, 255] + review_bg = [255, 255, 255, 1.0] def process(self, instance): self.log.info( @@ -111,7 +111,7 @@ class ExtractSequence(pyblish.api.Extractor): "Files will be rendered to folder: {}".format(output_dir) ) - if 
instance.data["family"] == "review": + if instance.data["productType"] == "review": result = self.render_review( output_dir, mark_in, mark_out, scene_bg_color ) diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_asset_name.xml b/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_asset_name.xml index 33a9ca4247..83753b3410 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_asset_name.xml +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_asset_name.xml @@ -2,9 +2,9 @@ Subset context -## Invalid subset context +## Invalid product context -Context of the given subset doesn't match your current scene. +Context of the given product doesn't match your current scene. ### How to repair? @@ -15,7 +15,7 @@ After that restart publishing with Reload button. ### How could this happen? -The subset was created in different scene with different context +The product was created in different scene with different context or the scene file was copy pasted from different context. diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_duplicated_layer_names.xml b/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_duplicated_layer_names.xml index 5d798544c0..23c899cfc6 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_duplicated_layer_names.xml +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_duplicated_layer_names.xml @@ -10,13 +10,13 @@ Can't determine which layers should be published because there are duplicated la {layer_names} -*Check layer names for all subsets in list on left side.* +*Check layer names for all products in list on left side.* ### How to repair? Hide/rename/remove layers that should not be published. -If all of them should be published then you have duplicated subset names in the scene. In that case you have to recrete them and use different variant name. +If all of them should be published then you have duplicated product names in the scene. In that case you have to recrete them and use different variant name. diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_layers_visibility.xml b/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_layers_visibility.xml index 5832c74350..5013f38eca 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_layers_visibility.xml +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_layers_visibility.xml @@ -4,13 +4,13 @@ Layers visibility ## All layers are not visible -Layers visibility was changed during publishing which caused that all layers for subset "{instance_name}" are hidden. +Layers visibility was changed during publishing which caused that all layers for product "{instance_name}" are hidden. ### Layer names for **{instance_name}** {layer_names} -*Check layer names for all subsets in the list on the left side.* +*Check layer names for all products in the list on the left side.* ### How to repair? 
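Alongside the data-key renames, these hunks switch the environment variables from the Avalon-era names to the AYON ones. A sketch of the mapping seen across this section; `copy_legacy_env` is a hypothetical helper used only to illustrate it.

```python
import os

# Avalon-era variable -> AYON variable, as renamed in this patch.
ENV_RENAMES = {
    "AVALON_PROJECT": "AYON_PROJECT_NAME",
    "AVALON_ASSET": "AYON_FOLDER_PATH",
    "AVALON_TASK": "AYON_TASK_NAME",
    "AVALON_WORKDIR": "AYON_WORKDIR",
    "AVALON_LAST_WORKFILE": "AYON_LAST_WORKFILE",
}


def copy_legacy_env(env=None):
    """Copy legacy values to the AYON variable names when missing."""
    env = dict(os.environ if env is None else env)
    for old_key, new_key in ENV_RENAMES.items():
        if old_key in env and new_key not in env:
            env[new_key] = env[old_key]
    return env
```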
diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_missing_layer_names.xml b/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_missing_layer_names.xml index e96e7c5044..000fe84844 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_missing_layer_names.xml +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_missing_layer_names.xml @@ -4,7 +4,7 @@ Missing layers ## Missing layers for render pass -Render pass subset "{instance_name}" has stored layer names that belong to it's rendering scope but layers were not found in scene. +Render pass product "{instance_name}" has stored layer names that belong to it's rendering scope but layers were not found in scene. ### Missing layer names @@ -12,7 +12,7 @@ Render pass subset "{instance_name}" has stored layer names that belong to it's ### How to repair? -Find layers that belong to subset {instance_name} and rename them back to expected layer names or remove the subset and create new with right layers. +Find layers that belong to product {instance_name} and rename them back to expected layer names or remove the product and create new with right layers. diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/validate_asset_name.py b/client/ayon_core/hosts/tvpaint/plugins/publish/validate_asset_name.py index 62603a460b..927d601e34 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/validate_asset_name.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/validate_asset_name.py @@ -20,7 +20,7 @@ class FixAssetNames(pyblish.api.Action): on = "failed" def process(self, context, plugin): - context_asset_name = context.data["asset"] + context_asset_name = context.data["folderPath"] old_instance_items = list_instances() new_instance_items = [] for instance_item in old_instance_items: @@ -51,9 +51,9 @@ class ValidateAssetName( def process(self, context): if not self.is_active(context.data): return - context_asset_name = context.data["asset"] + context_asset_name = context.data["folderPath"] for instance in context: - asset_name = instance.data.get("asset") + asset_name = instance.data.get("folderPath") if asset_name and asset_name == context_asset_name: continue diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/validate_render_layer_group.py b/client/ayon_core/hosts/tvpaint/plugins/publish/validate_render_layer_group.py index 66793cbc7f..0e97a01de2 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/validate_render_layer_group.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/validate_render_layer_group.py @@ -4,7 +4,7 @@ from ayon_core.pipeline import PublishXmlValidationError class ValidateRenderLayerGroups(pyblish.api.ContextPlugin): - """Validate group ids of renderLayer subsets. + """Validate group ids of renderLayer products. Validate that there are not 2 render layers using the same group. 
""" @@ -46,19 +46,21 @@ class ValidateRenderLayerGroups(pyblish.api.ContextPlugin): group["name"], group["group_id"], ) - line_join_subset_names = "\n".join([ - f" - {instance['subset']}" + line_join_product_names = "\n".join([ + f" - {instance['productName']}" for instance in instances ]) - joined_subset_names = ", ".join([ - f"\"{instance['subset']}\"" + joined_product_names = ", ".join([ + f"\"{instance['productName']}\"" for instance in instances ]) per_group_msgs.append( - "{} < {} >".format(group_label, joined_subset_names) + "{} < {} >".format(group_label, joined_product_names) ) groups_information_lines.append( - "{}\n{}".format(group_label, line_join_subset_names) + "{}\n{}".format( + group_label, line_join_product_names + ) ) # Raise an error diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/validate_workfile_metadata.py b/client/ayon_core/hosts/tvpaint/plugins/publish/validate_workfile_metadata.py index bdc46d02cd..1d9954d051 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/validate_workfile_metadata.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/validate_workfile_metadata.py @@ -31,7 +31,7 @@ class ValidateWorkfileMetadata(pyblish.api.ContextPlugin): actions = [ValidateWorkfileMetadataRepair] - required_keys = {"project_name", "asset_name", "task_name"} + required_keys = {"project_name", "folder_path", "task_name"} def process(self, context): workfile_context = context.data["workfile_context"] diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/validate_workfile_project_name.py b/client/ayon_core/hosts/tvpaint/plugins/publish/validate_workfile_project_name.py index be3259bfd8..5b42842717 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/validate_workfile_project_name.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/validate_workfile_project_name.py @@ -6,7 +6,7 @@ class ValidateWorkfileProjectName(pyblish.api.ContextPlugin): """Validate project name stored in workfile metadata. It is not possible to publish from different project than is set in - environment variable "AVALON_PROJECT". + environment variable "AYON_PROJECT_NAME". """ label = "Validate Workfile Project Name" diff --git a/client/ayon_core/hosts/unreal/addon.py b/client/ayon_core/hosts/unreal/addon.py index 745df951c1..c65490bd8c 100644 --- a/client/ayon_core/hosts/unreal/addon.py +++ b/client/ayon_core/hosts/unreal/addon.py @@ -1,17 +1,14 @@ import os import re -from ayon_core.modules import IHostAddon, OpenPypeModule +from ayon_core.addon import AYONAddon, IHostAddon UNREAL_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class UnrealAddon(OpenPypeModule, IHostAddon): +class UnrealAddon(AYONAddon, IHostAddon): name = "unreal" host_name = "unreal" - def initialize(self, module_settings): - self.enabled = True - def get_global_environments(self): return { "AYON_UNREAL_ROOT": UNREAL_ROOT_DIR, diff --git a/client/ayon_core/hosts/unreal/api/plugin.py b/client/ayon_core/hosts/unreal/api/plugin.py index ddf54f6c79..f31c7c46b9 100644 --- a/client/ayon_core/hosts/unreal/api/plugin.py +++ b/client/ayon_core/hosts/unreal/api/plugin.py @@ -35,52 +35,50 @@ class UnrealBaseCreator(Creator): suffix = "_INS" @staticmethod - def cache_subsets(shared_data): + def cache_instance_data(shared_data): """Cache instances for Creators to shared data. - Create `unreal_cached_subsets` key when needed in shared data and + Create `unreal_cached_instances` key when needed in shared data and fill it with all collected instances from the scene under its respective creator identifiers. 
If legacy instances are detected in the scene, create - `unreal_cached_legacy_subsets` there and fill it with - all legacy subsets under family as a key. + `unreal_cached_legacy_instances` there and fill it with + all legacy products under family as a key. Args: Dict[str, Any]: Shared data. - Return: - Dict[str, Any]: Shared data dictionary. - """ - if shared_data.get("unreal_cached_subsets") is None: - unreal_cached_subsets = collections.defaultdict(list) - unreal_cached_legacy_subsets = collections.defaultdict(list) - for instance in ls_inst(): - creator_id = instance.get("creator_identifier") - if creator_id: - unreal_cached_subsets[creator_id].append(instance) - else: - family = instance.get("family") - unreal_cached_legacy_subsets[family].append(instance) + if "unreal_cached_instances" in shared_data: + return - shared_data["unreal_cached_subsets"] = unreal_cached_subsets - shared_data["unreal_cached_legacy_subsets"] = ( - unreal_cached_legacy_subsets - ) - return shared_data + unreal_cached_instances = collections.defaultdict(list) + unreal_cached_legacy_instances = collections.defaultdict(list) + for instance in ls_inst(): + creator_id = instance.get("creator_identifier") + if creator_id: + unreal_cached_instances[creator_id].append(instance) + else: + family = instance.get("family") + unreal_cached_legacy_instances[family].append(instance) - def create(self, subset_name, instance_data, pre_create_data): + shared_data["unreal_cached_instances"] = unreal_cached_instances + shared_data["unreal_cached_legacy_instances"] = ( + unreal_cached_legacy_instances + ) + + def create(self, product_name, instance_data, pre_create_data): try: - instance_name = f"{subset_name}{self.suffix}" + instance_name = f"{product_name}{self.suffix}" pub_instance = create_publish_instance(instance_name, self.root) - instance_data["subset"] = subset_name + instance_data["productName"] = product_name instance_data["instance_path"] = f"{self.root}/{instance_name}" instance = CreatedInstance( - self.family, - subset_name, + self.product_type, + product_name, instance_data, self) self._add_instance_to_context(instance) @@ -106,9 +104,9 @@ class UnrealBaseCreator(Creator): def collect_instances(self): # cache instances if missing - self.cache_subsets(self.collection_shared_data) + self.cache_instance_data(self.collection_shared_data) for instance in self.collection_shared_data[ - "unreal_cached_subsets"].get(self.identifier, []): + "unreal_cached_instances"].get(self.identifier, []): # Unreal saves metadata as string, so we need to convert it back instance['creator_attributes'] = ast.literal_eval( instance.get('creator_attributes', '{}')) @@ -148,11 +146,11 @@ class UnrealBaseCreator(Creator): class UnrealAssetCreator(UnrealBaseCreator): """Base class for Unreal creator plugins based on assets.""" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): """Create instance of the asset. Args: - subset_name (str): Name of the subset. + product_name (str): Name of the product. instance_data (dict): Data for the instance. pre_create_data (dict): Data for the instance. 
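A standalone sketch of the renamed Unreal caching helper above: collected instances are grouped once per publish session, stored in the creators' shared data, and later calls return early. `list_instances` stands in for the host's `ls_inst()` used by the real method.

```python
import collections


def cache_instance_data(shared_data, list_instances):
    """Group scene instances by creator identifier into shared data."""
    if "unreal_cached_instances" in shared_data:
        return

    by_creator = collections.defaultdict(list)
    legacy_by_family = collections.defaultdict(list)
    for instance in list_instances():
        creator_id = instance.get("creator_identifier")
        if creator_id:
            by_creator[creator_id].append(instance)
        else:
            # Instances without a creator identifier are legacy data.
            legacy_by_family[instance.get("family")].append(instance)

    shared_data["unreal_cached_instances"] = by_creator
    shared_data["unreal_cached_legacy_instances"] = legacy_by_family
```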
@@ -172,7 +170,7 @@ class UnrealAssetCreator(UnrealBaseCreator): a.get_path_name() for a in sel_objects] super(UnrealAssetCreator, self).create( - subset_name, + product_name, instance_data, pre_create_data) @@ -192,11 +190,11 @@ class UnrealAssetCreator(UnrealBaseCreator): class UnrealActorCreator(UnrealBaseCreator): """Base class for Unreal creator plugins based on actors.""" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): """Create instance of the asset. Args: - subset_name (str): Name of the subset. + product_name (str): Name of the product. instance_data (dict): Data for the instance. pre_create_data (dict): Data for the instance. @@ -226,7 +224,7 @@ class UnrealActorCreator(UnrealBaseCreator): instance_data["level"] = world.get_path_name() super(UnrealActorCreator, self).create( - subset_name, + product_name, instance_data, pre_create_data) diff --git a/client/ayon_core/hosts/unreal/api/rendering.py b/client/ayon_core/hosts/unreal/api/rendering.py index 8717788732..395513aefa 100644 --- a/client/ayon_core/hosts/unreal/api/rendering.py +++ b/client/ayon_core/hosts/unreal/api/rendering.py @@ -56,11 +56,11 @@ def start_rendering(): for i in instances: data = pipeline.parse_container(i.get_path_name()) - if data["family"] == "render": + if data["productType"] == "render": inst_data.append(data) try: - project = os.environ.get("AVALON_PROJECT") + project = os.environ.get("AYON_PROJECT_NAME") anatomy = Anatomy(project) root = anatomy.roots['renders'] except Exception as e: diff --git a/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py b/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py index 4317844ca5..0eaa1adb84 100644 --- a/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py +++ b/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py @@ -146,7 +146,7 @@ class UnrealPrelaunchHook(PreLaunchHook): def execute(self): """Hook entry method.""" - workdir = self.launch_context.env["AVALON_WORKDIR"] + workdir = self.launch_context.env["AYON_WORKDIR"] executable = str(self.launch_context.executable) engine_version = self.app_name.split("/")[-1].replace("-", ".") try: diff --git a/client/ayon_core/hosts/unreal/plugins/create/create_camera.py b/client/ayon_core/hosts/unreal/plugins/create/create_camera.py index f78de00f44..3ffb9dd70b 100644 --- a/client/ayon_core/hosts/unreal/plugins/create/create_camera.py +++ b/client/ayon_core/hosts/unreal/plugins/create/create_camera.py @@ -13,10 +13,10 @@ class CreateCamera(UnrealAssetCreator): identifier = "io.ayon.creators.unreal.camera" label = "Camera" - family = "camera" + product_type = "camera" icon = "fa.camera" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): if pre_create_data.get("use_selection"): sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() selection = [a.get_path_name() for a in sel_objects] @@ -33,6 +33,6 @@ class CreateCamera(UnrealAssetCreator): instance_data["level"] = world.get_path_name() super(CreateCamera, self).create( - subset_name, + product_name, instance_data, pre_create_data) diff --git a/client/ayon_core/hosts/unreal/plugins/create/create_layout.py b/client/ayon_core/hosts/unreal/plugins/create/create_layout.py index 0ec8a8d445..9bcddfe507 100644 --- a/client/ayon_core/hosts/unreal/plugins/create/create_layout.py +++ b/client/ayon_core/hosts/unreal/plugins/create/create_layout.py @@ -9,5 +9,5 @@ class 
CreateLayout(UnrealActorCreator): identifier = "io.ayon.creators.unreal.layout" label = "Layout" - family = "layout" + product_type = "layout" icon = "cubes" diff --git a/client/ayon_core/hosts/unreal/plugins/create/create_look.py b/client/ayon_core/hosts/unreal/plugins/create/create_look.py index ecc0783c35..edc6d45f2f 100644 --- a/client/ayon_core/hosts/unreal/plugins/create/create_look.py +++ b/client/ayon_core/hosts/unreal/plugins/create/create_look.py @@ -16,10 +16,10 @@ class CreateLook(UnrealAssetCreator): identifier = "io.ayon.creators.unreal.look" label = "Look" - family = "look" + product_type = "look" icon = "paint-brush" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): # We need to set this to True for the parent class to work pre_create_data["use_selection"] = True sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() @@ -33,7 +33,7 @@ class CreateLook(UnrealAssetCreator): look_directory = "/Game/Ayon/Looks" # Create the folder - folder_name = create_folder(look_directory, subset_name) + folder_name = create_folder(look_directory, product_name) path = f"{look_directory}/{folder_name}" instance_data["look"] = path @@ -67,7 +67,7 @@ class CreateLook(UnrealAssetCreator): unreal.EditorAssetLibrary.save_asset(object_path) super(CreateLook, self).create( - subset_name, + product_name, instance_data, pre_create_data) diff --git a/client/ayon_core/hosts/unreal/plugins/create/create_render.py b/client/ayon_core/hosts/unreal/plugins/create/create_render.py index 5bb782e7ea..cbec84c543 100644 --- a/client/ayon_core/hosts/unreal/plugins/create/create_render.py +++ b/client/ayon_core/hosts/unreal/plugins/create/create_render.py @@ -24,11 +24,11 @@ class CreateRender(UnrealAssetCreator): identifier = "io.ayon.creators.unreal.render" label = "Render" - family = "render" + product_type = "render" icon = "eye" def create_instance( - self, instance_data, subset_name, pre_create_data, + self, instance_data, product_name, pre_create_data, selected_asset_path, master_seq, master_lvl, seq_data ): instance_data["members"] = [selected_asset_path] @@ -40,12 +40,12 @@ class CreateRender(UnrealAssetCreator): instance_data["frameEnd"] = seq_data.get('frame_range')[1] super(CreateRender, self).create( - subset_name, + product_name, instance_data, pre_create_data) def create_with_new_sequence( - self, subset_name, instance_data, pre_create_data + self, product_name, instance_data, pre_create_data ): # If the option to create a new level sequence is selected, # create a new level sequence and a master level. 
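Illustrative only: the instance data assembled by the Unreal render creator above, written as a plain function so the keys are easy to scan. `seq_data` mirrors the dictionary built from the level sequence's playback range; the function name is hypothetical.

```python
def build_render_instance_data(selected_asset_path, master_seq, master_lvl,
                               seq_data):
    """Collect the render instance data used by the Unreal creator."""
    return {
        "members": [selected_asset_path],
        "sequence": selected_asset_path,
        "master_sequence": master_seq,
        "master_lvl": master_lvl,
        "frameStart": seq_data["frame_range"][0],
        "frameEnd": seq_data["frame_range"][1],
    }
```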
@@ -53,7 +53,7 @@ class CreateRender(UnrealAssetCreator): root = f"/Game/Ayon/Sequences" # Create a new folder for the sequence in root - sequence_dir_name = create_folder(root, subset_name) + sequence_dir_name = create_folder(root, product_name) sequence_dir = f"{root}/{sequence_dir_name}" unreal.log_warning(f"sequence_dir: {sequence_dir}") @@ -61,7 +61,7 @@ class CreateRender(UnrealAssetCreator): # Create the level sequence asset_tools = unreal.AssetToolsHelpers.get_asset_tools() seq = asset_tools.create_asset( - asset_name=subset_name, + asset_name=product_name, package_path=sequence_dir, asset_class=unreal.LevelSequence, factory=unreal.LevelSequenceFactoryNew()) @@ -92,7 +92,7 @@ class CreateRender(UnrealAssetCreator): else: unreal.EditorLevelLibrary.save_current_level() - ml_path = f"{sequence_dir}/{subset_name}_MasterLevel" + ml_path = f"{sequence_dir}/{product_name}_MasterLevel" if UNREAL_VERSION.major >= 5: unreal.LevelEditorSubsystem().new_level(ml_path) @@ -107,11 +107,11 @@ class CreateRender(UnrealAssetCreator): seq.get_playback_end())} self.create_instance( - instance_data, subset_name, pre_create_data, + instance_data, product_name, pre_create_data, seq.get_path_name(), seq.get_path_name(), ml_path, seq_data) def create_from_existing_sequence( - self, subset_name, instance_data, pre_create_data + self, product_name, instance_data, pre_create_data ): ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ -224,16 +224,16 @@ class CreateRender(UnrealAssetCreator): continue self.create_instance( - instance_data, subset_name, pre_create_data, + instance_data, product_name, pre_create_data, selected_asset_path, master_seq, master_lvl, seq_data) - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): if pre_create_data.get("create_seq"): self.create_with_new_sequence( - subset_name, instance_data, pre_create_data) + product_name, instance_data, pre_create_data) else: self.create_from_existing_sequence( - subset_name, instance_data, pre_create_data) + product_name, instance_data, pre_create_data) def get_pre_create_attr_defs(self): return [ diff --git a/client/ayon_core/hosts/unreal/plugins/create/create_staticmeshfbx.py b/client/ayon_core/hosts/unreal/plugins/create/create_staticmeshfbx.py index 7fcd6f165a..603b852873 100644 --- a/client/ayon_core/hosts/unreal/plugins/create/create_staticmeshfbx.py +++ b/client/ayon_core/hosts/unreal/plugins/create/create_staticmeshfbx.py @@ -9,5 +9,5 @@ class CreateStaticMeshFBX(UnrealAssetCreator): identifier = "io.ayon.creators.unreal.staticmeshfbx" label = "Static Mesh (FBX)" - family = "unrealStaticMesh" + product_type = "unrealStaticMesh" icon = "cube" diff --git a/client/ayon_core/hosts/unreal/plugins/create/create_uasset.py b/client/ayon_core/hosts/unreal/plugins/create/create_uasset.py index 500726497d..1cd532c63d 100644 --- a/client/ayon_core/hosts/unreal/plugins/create/create_uasset.py +++ b/client/ayon_core/hosts/unreal/plugins/create/create_uasset.py @@ -14,12 +14,12 @@ class CreateUAsset(UnrealAssetCreator): identifier = "io.ayon.creators.unreal.uasset" label = "UAsset" - family = "uasset" + product_type = "uasset" icon = "cube" extension = ".uasset" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): if pre_create_data.get("use_selection"): ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ -44,7 +44,7 @@ class CreateUAsset(UnrealAssetCreator): f"{Path(sys_path).name} is not 
a {self.label}.") super(CreateUAsset, self).create( - subset_name, + product_name, instance_data, pre_create_data) @@ -54,13 +54,13 @@ class CreateUMap(CreateUAsset): identifier = "io.ayon.creators.unreal.umap" label = "Level" - family = "uasset" + product_type = "uasset" extension = ".umap" - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): instance_data["families"] = ["umap"] super(CreateUMap, self).create( - subset_name, + product_name, instance_data, pre_create_data) diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_alembic_animation.py b/client/ayon_core/hosts/unreal/plugins/load/load_alembic_animation.py index 4d7760e385..a4fcda6ade 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_alembic_animation.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_alembic_animation.py @@ -56,7 +56,7 @@ class AnimationAlembicLoader(plugin.Loader): Args: context (dict): application context - name (str): subset name + name (str): Product name namespace (str): in Unreal this is basically path to container. This is not passed here, so namespace is set by `containerise()` because only then we know @@ -126,12 +126,15 @@ class AnimationAlembicLoader(plugin.Loader): return asset_content - def update(self, container, representation): - name = container["asset_name"] - source_path = get_representation_path(representation) + def update(self, container, context): + folder_name = container["asset_name"] + repre_doc = context["representation"] + source_path = get_representation_path(repre_doc) destination_path = container["namespace"] - task = self.get_task(source_path, destination_path, name, True) + task = self.get_task( + source_path, destination_path, folder_name, True + ) # do import fbx and replace existing data asset_tools = unreal.AssetToolsHelpers.get_asset_tools() @@ -143,8 +146,8 @@ class AnimationAlembicLoader(plugin.Loader): unreal_pipeline.imprint( container_path, { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]) + "representation": str(repre_doc["_id"]), + "parent": str(repre_doc["parent"]) }) asset_content = unreal.EditorAssetLibrary.list_assets( diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_animation.py b/client/ayon_core/hosts/unreal/plugins/load/load_animation.py index 4d44b6c0c2..3aad6886be 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_animation.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_animation.py @@ -128,7 +128,7 @@ class AnimationFBXLoader(plugin.Loader): Args: context (dict): application context - name (str): subset name + name (str): Product name namespace (str): in Unreal this is basically path to container. 
This is not passed here, so namespace is set by `containerise()` because only then we know @@ -246,9 +246,10 @@ class AnimationFBXLoader(plugin.Loader): unreal.EditorLevelLibrary.save_current_level() unreal.EditorLevelLibrary.load_level(master_level) - def update(self, container, representation): - name = container["asset_name"] - source_path = get_representation_path(representation) + def update(self, container, context): + repre_doc = context["representation"] + folder_name = container["asset_name"] + source_path = get_representation_path(repre_doc) asset_doc = get_current_project_asset(fields=["data.fps"]) destination_path = container["namespace"] @@ -258,7 +259,7 @@ class AnimationFBXLoader(plugin.Loader): task.set_editor_property('filename', source_path) task.set_editor_property('destination_path', destination_path) # strip suffix - task.set_editor_property('destination_name', name) + task.set_editor_property('destination_name', folder_name) task.set_editor_property('replace_existing', True) task.set_editor_property('automated', True) task.set_editor_property('save', True) @@ -305,8 +306,8 @@ class AnimationFBXLoader(plugin.Loader): unreal_pipeline.imprint( container_path, { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]) + "representation": str(repre_doc["_id"]), + "parent": str(repre_doc["parent"]) }) asset_content = EditorAssetLibrary.list_assets( diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_camera.py b/client/ayon_core/hosts/unreal/plugins/load/load_camera.py index faba561085..34c1e3e023 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_camera.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_camera.py @@ -70,7 +70,7 @@ class CameraLoader(plugin.Loader): Args: context (dict): application context - name (str): subset name + name (str): Product name namespace (str): in Unreal this is basically path to container. 
This is not passed here, so namespace is set by `containerise()` because only then we know @@ -260,7 +260,7 @@ class CameraLoader(plugin.Loader): return asset_content - def update(self, container, representation): + def update(self, container, context): ar = unreal.AssetRegistryHelpers.get_asset_registry() curr_level_sequence = LevelSequenceLib.get_current_level_sequence() @@ -379,12 +379,13 @@ class CameraLoader(plugin.Loader): sub_scene.set_sequence(new_sequence) + repre_doc = context["representation"] self._import_camera( EditorLevelLibrary.get_editor_world(), new_sequence, new_sequence.get_bindings(), settings, - str(representation["data"]["path"]) + str(repre_doc["data"]["path"]) ) # Set range of all sections @@ -412,8 +413,8 @@ class CameraLoader(plugin.Loader): key.set_time(unreal.FrameNumber(value=new_time)) data = { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]) + "representation": str(repre_doc["_id"]), + "parent": str(repre_doc["parent"]) } imprint(f"{asset_dir}/{container.get('container_name')}", data) diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_geometrycache_abc.py b/client/ayon_core/hosts/unreal/plugins/load/load_geometrycache_abc.py index 360737cbc5..bca99f202f 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_geometrycache_abc.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_geometrycache_abc.py @@ -108,7 +108,7 @@ class PointCacheAlembicLoader(plugin.Loader): Args: context (dict): application context - name (str): subset name + name (str): Product name namespace (str): in Unreal this is basically path to container. This is not passed here, so namespace is set by `containerise()` because only then we know @@ -163,25 +163,30 @@ class PointCacheAlembicLoader(plugin.Loader): return asset_content - def update(self, container, representation): - context = representation.get("context", {}) - - unreal.log_warning(context) - - if not context: - raise RuntimeError("No context found in representation") + def update(self, container, context): + asset_doc = context["asset"] + subset_doc = context["subset"] + version_doc = context["version"] + repre_doc = context["representation"] # Create directory for asset and Ayon container - asset = context.get('asset') - name = context.get('subset') + folder_name = asset_doc["name"] + product_name = subset_doc["name"] + suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = context.get('version') + asset_name = product_name + if folder_name: + asset_name = f"{folder_name}_{product_name}" + # Check if version is hero version and use different name - name_version = f"{name}_v{version:03d}" if version else f"{name}_hero" + version = version_doc.get("name", -1) + if version < 0: + name_version = f"{product_name}_hero" + else: + name_version = f"{product_name}_v{version:03d}" tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="") + f"{self.root}/{folder_name}/{name_version}", suffix="") container_name += suffix @@ -189,14 +194,14 @@ class PointCacheAlembicLoader(plugin.Loader): frame_end = int(container.get("frame_end")) if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): - path = get_representation_path(representation) + path = get_representation_path(repre_doc) self.import_and_containerize( path, asset_dir, asset_name, container_name, frame_start, frame_end) self.imprint( - asset, asset_dir, container_name, asset_name, 
representation, + folder_name, asset_dir, container_name, asset_name, repre_doc, frame_start, frame_end) asset_content = unreal.EditorAssetLibrary.list_assets( diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_layout.py b/client/ayon_core/hosts/unreal/plugins/load/load_layout.py index a1cc2e785a..f78dba9e57 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_layout.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_layout.py @@ -369,16 +369,18 @@ class LayoutLoader(plugin.Loader): if representation not in repr_loaded: repr_loaded.append(representation) - family = element.get('family') + product_type = element.get("product_type") + if product_type is None: + product_type = element.get("family") loaders = loaders_from_representation( all_loaders, representation) loader = None if repr_format == 'fbx': - loader = self._get_fbx_loader(loaders, family) + loader = self._get_fbx_loader(loaders, product_type) elif repr_format == 'abc': - loader = self._get_abc_loader(loaders, family) + loader = self._get_abc_loader(loaders, product_type) if not loader: self.log.error( @@ -422,12 +424,12 @@ class LayoutLoader(plugin.Loader): actors = [] - if family == 'model': + if product_type == 'model': actors, _ = self._process_family( assets, 'StaticMesh', transform, basis, sequence, inst ) - elif family == 'rig': + elif product_type == 'rig': actors, bindings = self._process_family( assets, 'SkeletalMesh', transform, basis, sequence, inst @@ -481,7 +483,7 @@ class LayoutLoader(plugin.Loader): for asset_container in asset_containers: package_path = asset_container.get_editor_property('package_path') family = EditorAssetLibrary.get_metadata_tag( - asset_container.get_asset(), 'family') + asset_container.get_asset(), "family") assets = EditorAssetLibrary.list_assets( str(package_path), recursive=False) if family == 'model': @@ -501,7 +503,7 @@ class LayoutLoader(plugin.Loader): Args: context (dict): application context - name (str): subset name + name (str): Product name namespace (str): in Unreal this is basically path to container. 
This is not passed here, so namespace is set by `containerise()` because only then we know @@ -659,7 +661,7 @@ class LayoutLoader(plugin.Loader): return asset_content - def update(self, container, representation): + def update(self, container, context): data = get_current_project_settings() create_sequences = data["unreal"]["level_sequences_for_layouts"] @@ -675,9 +677,11 @@ class LayoutLoader(plugin.Loader): root = "/Game/Ayon" asset_dir = container.get('namespace') - context = representation.get("context") - hierarchy = context.get('hierarchy').split("/") + asset_doc = context["asset"] + repre_doc = context["representation"] + + hierarchy = list(asset_doc["data"]["parents"]) sequence = None master_level = None @@ -726,13 +730,13 @@ class LayoutLoader(plugin.Loader): EditorAssetLibrary.delete_directory(f"{asset_dir}/animations/") - source_path = get_representation_path(representation) + source_path = get_representation_path(repre_doc) loaded_assets = self._process(source_path, asset_dir, sequence) data = { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]), + "representation": str(repre_doc["_id"]), + "parent": str(repre_doc["parent"]), "loaded_assets": loaded_assets } imprint( diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_layout_existing.py b/client/ayon_core/hosts/unreal/plugins/load/load_layout_existing.py index 6f390b4920..cf987765f4 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_layout_existing.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_layout_existing.py @@ -33,9 +33,9 @@ class ExistingLayoutLoader(plugin.Loader): delete_unmatched_assets = True @classmethod - def apply_settings(cls, project_settings, *args, **kwargs): + def apply_settings(cls, project_settings): super(ExistingLayoutLoader, cls).apply_settings( - project_settings, *args, **kwargs + project_settings ) cls.delete_unmatched_assets = ( project_settings["unreal"]["delete_unmatched_assets"] @@ -284,9 +284,9 @@ class ExistingLayoutLoader(plugin.Loader): # Create the container for the asset. 
asset = repr_data.get('context').get('asset') - subset = repr_data.get('context').get('subset') + product_name = repr_data.get('context').get('subset') container = self._create_container( - f"{asset}_{subset}", mesh_path, asset, + f"{asset}_{product_name}", mesh_path, asset, repr_data.get('_id'), repr_data.get('parent'), repr_data.get('context').get('family') ) @@ -407,16 +407,18 @@ class ExistingLayoutLoader(plugin.Loader): } upipeline.imprint(f"{curr_level_path}/{container_name}", data) - def update(self, container, representation): + def update(self, container, context): asset_dir = container.get('namespace') - source_path = get_representation_path(representation) - project_name = get_current_project_name() + project_name = context["project"]["name"] + repre_doc = context["representation"] + + source_path = get_representation_path(repre_doc) containers = self._process(source_path, project_name) data = { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]), + "representation": str(repre_doc["_id"]), + "parent": str(repre_doc["parent"]), "loaded_assets": containers } upipeline.imprint( diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_abc.py b/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_abc.py index 225df3b440..58fbda491c 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_abc.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_abc.py @@ -94,7 +94,7 @@ class SkeletalMeshAlembicLoader(plugin.Loader): Args: context (dict): application context - name (str): subset name + name (str): Product name namespace (str): in Unreal this is basically path to container. This is not passed here, so namespace is set by `containerise()` because only then we know @@ -144,34 +144,40 @@ class SkeletalMeshAlembicLoader(plugin.Loader): return asset_content - def update(self, container, representation): - context = representation.get("context", {}) + def update(self, container, context): + asset_doc = context["asset"] + subset_doc = context["subset"] + version_doc = context["version"] + repre_doc = context["representation"] - if not context: - raise RuntimeError("No context found in representation") + folder_name = asset_doc["name"] + product_name = subset_doc["name"] - # Create directory for asset and Ayon container - asset = context.get('asset') - name = context.get('subset') + # Create directory for folder and Ayon container suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = context.get('version') + asset_name = product_name + if folder_name: + asset_name = f"{folder_name}_{product_name}" # Check if version is hero version and use different name - name_version = f"{name}_v{version:03d}" if version else f"{name}_hero" + version = version_doc.get("name", -1) + if version < 0: + name_version = f"{product_name}_hero" + else: + name_version = f"{product_name}_v{version:03d}" tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="") + f"{self.root}/{folder_name}/{name_version}", suffix="") container_name += suffix if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): - path = get_representation_path(representation) + path = get_representation_path(repre_doc) self.import_and_containerize(path, asset_dir, asset_name, container_name) self.imprint( - asset, asset_dir, container_name, asset_name, representation) + folder_name, asset_dir, container_name, 
asset_name, repre_doc) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=False diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py b/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py index 1c45c58d02..436d4c8a58 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py @@ -99,7 +99,7 @@ class SkeletalMeshFBXLoader(plugin.Loader): Args: context (dict): application context - name (str): subset name + name (str): Product name namespace (str): in Unreal this is basically path to container. This is not passed here, so namespace is set by `containerise()` because only then we know @@ -146,34 +146,40 @@ class SkeletalMeshFBXLoader(plugin.Loader): return asset_content - def update(self, container, representation): - context = representation.get("context", {}) + def update(self, container, context): + asset_doc = context["asset"] + subset_doc = context["subset"] + version_doc = context["version"] + repre_doc = context["representation"] - if not context: - raise RuntimeError("No context found in representation") + folder_name = asset_doc["name"] + product_name = subset_doc["name"] # Create directory for asset and Ayon container - asset = context.get('asset') - name = context.get('subset') suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = context.get('version') + asset_name = product_name + if folder_name: + asset_name = f"{folder_name}_{product_name}" # Check if version is hero version and use different name - name_version = f"{name}_v{version:03d}" if version else f"{name}_hero" + version = version_doc.get("name", -1) + if version < 0: + name_version = f"{product_name}_hero" + else: + name_version = f"{product_name}_v{version:03d}" tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="") + f"{self.root}/{folder_name}/{name_version}", suffix="") container_name += suffix if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): - path = get_representation_path(representation) + path = get_representation_path(repre_doc) self.import_and_containerize( path, asset_dir, asset_name, container_name) self.imprint( - asset, asset_dir, container_name, asset_name, representation) + folder_name, asset_dir, container_name, asset_name, repre_doc) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=False diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_abc.py b/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_abc.py index a0814b5b07..6ff4bcd4f2 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_abc.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_abc.py @@ -95,7 +95,7 @@ class StaticMeshAlembicLoader(plugin.Loader): Args: context (dict): application context - name (str): subset name + name (str): Product name namespace (str): in Unreal this is basically path to container.
This is not passed here, so namespace is set by `containerise()` because only then we know @@ -145,34 +145,36 @@ class StaticMeshAlembicLoader(plugin.Loader): return asset_content - def update(self, container, representation): - context = representation.get("context", {}) + def update(self, container, context): + asset_doc = context["asset"] + subset_doc = context["subset"] + repre_doc = context["representation"] - if not context: - raise RuntimeError("No context found in representation") + folder_name = asset_doc["name"] + product_name = subset_doc["name"] # Create directory for asset and Ayon container - asset = context.get('asset') - name = context.get('subset') suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" + asset_name = product_name + if folder_name: + asset_name = f"{folder_name}_{product_name}" version = context.get('version') # Check if version is hero version and use different name - name_version = f"{name}_v{version:03d}" if version else f"{name}_hero" + name_version = f"{product_name}_hero" if version["name"] < 0 else f"{product_name}_v{version['name']:03d}" tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="") + f"{self.root}/{folder_name}/{name_version}", suffix="") container_name += suffix if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): - path = get_representation_path(representation) + path = get_representation_path(repre_doc) self.import_and_containerize(path, asset_dir, asset_name, container_name) self.imprint( - asset, asset_dir, container_name, asset_name, representation) + folder_name, asset_dir, container_name, asset_name, repre_doc) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=False diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_fbx.py b/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_fbx.py index a78b1bc959..d2c6fc5566 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_fbx.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_fbx.py @@ -87,7 +87,7 @@ class StaticMeshFBXLoader(plugin.Loader): Args: context (dict): application context - name (str): subset name + name (str): Product name namespace (str): in Unreal this is basically path to container.
This is not passed here, so namespace is set by `containerise()` because only then we know @@ -134,34 +134,40 @@ class StaticMeshFBXLoader(plugin.Loader): return asset_content - def update(self, container, representation): - context = representation.get("context", {}) + def update(self, container, context): + asset_doc = context["asset"] + subset_doc = context["subset"] + version_doc = context["version"] + repre_doc = context["representation"] - if not context: - raise RuntimeError("No context found in representation") + folder_name = asset_doc["name"] + product_name = subset_doc["name"] # Create directory for asset and Ayon container - asset = context.get('asset') - name = context.get('subset') suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = context.get('version') + asset_name = product_name + if folder_name: + asset_name = f"{folder_name}_{product_name}" # Check if version is hero version and use different name - name_version = f"{name}_v{version:03d}" if version else f"{name}_hero" + version = version_doc.get("name", -1) + if version < 0: + name_version = f"{product_name}_hero" + else: + name_version = f"{product_name}_v{version:03d}" tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="") + f"{self.root}/{folder_name}/{name_version}", suffix="") container_name += suffix if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): - path = get_representation_path(representation) + path = get_representation_path(repre_doc) self.import_and_containerize( path, asset_dir, asset_name, container_name) self.imprint( - asset, asset_dir, container_name, asset_name, representation) + folder_name, asset_dir, container_name, asset_name, repre_doc) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=False diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_uasset.py b/client/ayon_core/hosts/unreal/plugins/load/load_uasset.py index 048ec8eaba..9710d213ee 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_uasset.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_uasset.py @@ -28,7 +28,7 @@ class UAssetLoader(plugin.Loader): Args: context (dict): application context - name (str): subset name + name (str): Product name namespace (str): in Unreal this is basically path to container. 
This is not passed here, so namespace is set by `containerise()` because only then we know @@ -96,11 +96,15 @@ class UAssetLoader(plugin.Loader): return asset_content - def update(self, container, representation): + def update(self, container, context): ar = unreal.AssetRegistryHelpers.get_asset_registry() asset_dir = container["namespace"] - name = representation["context"]["subset"] + + subset_doc = context["subset"] + repre_doc = context["representation"] + + product_name = subset_doc["name"] unique_number = container["container_name"].split("_")[-2] @@ -116,19 +120,20 @@ class UAssetLoader(plugin.Loader): if obj.get_class().get_name() != "AyonAssetContainer": unreal.EditorAssetLibrary.delete_asset(asset) - update_filepath = get_representation_path(representation) + update_filepath = get_representation_path(repre_doc) shutil.copy( update_filepath, - f"{destination_path}/{name}_{unique_number}.{self.extension}") + f"{destination_path}/{product_name}_{unique_number}.{self.extension}" + ) container_path = f'{container["namespace"]}/{container["objectName"]}' # update metadata unreal_pipeline.imprint( container_path, { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]), + "representation": str(repre_doc["_id"]), + "parent": str(repre_doc["parent"]), } ) diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_yeticache.py b/client/ayon_core/hosts/unreal/plugins/load/load_yeticache.py index b643f352b7..c6e275c844 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_yeticache.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_yeticache.py @@ -69,7 +69,7 @@ class YetiLoader(plugin.Loader): Args: context (dict): application context - name (str): subset name + name (str): Product name namespace (str): in Unreal this is basically path to container. 
This is not passed here, so namespace is set by `containerise()` because only then we know @@ -139,9 +139,10 @@ class YetiLoader(plugin.Loader): return asset_content - def update(self, container, representation): + def update(self, container, context): + repre_doc = context["representation"] name = container["asset_name"] - source_path = get_representation_path(representation) + source_path = get_representation_path(repre_doc) destination_path = container["namespace"] task = self.get_task(source_path, destination_path, name, True) @@ -154,8 +155,8 @@ class YetiLoader(plugin.Loader): unreal_pipeline.imprint( container_path, { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]) + "representation": str(repre_doc["_id"]), + "parent": str(repre_doc["parent"]) }) asset_content = unreal.EditorAssetLibrary.list_assets( diff --git a/client/ayon_core/hosts/unreal/plugins/publish/collect_render_instances.py b/client/ayon_core/hosts/unreal/plugins/publish/collect_render_instances.py index 8641094610..ea53f221ea 100644 --- a/client/ayon_core/hosts/unreal/plugins/publish/collect_render_instances.py +++ b/client/ayon_core/hosts/unreal/plugins/publish/collect_render_instances.py @@ -57,19 +57,22 @@ class CollectRenderInstances(pyblish.api.InstancePlugin): seq = s.get('sequence') seq_name = seq.get_name() + product_type = "render" + new_product_name = f"{data.get('productName')}_{seq_name}" new_instance = context.create_instance( - f"{data.get('subset')}_" - f"{seq_name}") + new_product_name + ) new_instance[:] = seq_name new_data = new_instance.data - new_data["asset"] = seq_name + new_data["folderPath"] = f"/{s.get('output')}" new_data["setMembers"] = seq_name - new_data["family"] = "render" - new_data["families"] = ["render", "review"] + new_data["productName"] = new_product_name + new_data["productType"] = product_type + new_data["family"] = product_type + new_data["families"] = [product_type, "review"] new_data["parent"] = data.get("parent") - new_data["subset"] = f"{data.get('subset')}_{seq_name}" new_data["level"] = data.get("level") new_data["output"] = s.get('output') new_data["fps"] = seq.get_display_rate().numerator diff --git a/client/ayon_core/hosts/unreal/plugins/publish/extract_layout.py b/client/ayon_core/hosts/unreal/plugins/publish/extract_layout.py index a508f79f18..de8cf0be2a 100644 --- a/client/ayon_core/hosts/unreal/plugins/publish/extract_layout.py +++ b/client/ayon_core/hosts/unreal/plugins/publish/extract_layout.py @@ -68,6 +68,7 @@ class ExtractLayout(publish.Extractor): json_element = {} json_element["reference"] = str(blend_id) json_element["family"] = family + json_element["product_type"] = family json_element["instance_name"] = actor.get_name() json_element["asset_name"] = mesh.get_name() import_data = mesh.get_editor_property("asset_import_data") diff --git a/client/ayon_core/lib/__init__.py b/client/ayon_core/lib/__init__.py index 12a5535a1c..ab6a604adc 100644 --- a/client/ayon_core/lib/__init__.py +++ b/client/ayon_core/lib/__init__.py @@ -158,6 +158,7 @@ from .ayon_info import ( is_running_from_build, is_staging_enabled, is_dev_mode_enabled, + is_in_tests, ) @@ -229,6 +230,8 @@ __all__ = [ "IniSettingRegistry", "JSONSettingRegistry", + "AYONSecureRegistry", + "AYONSettingsRegistry", "OpenPypeSecureRegistry", "OpenPypeSettingsRegistry", "get_local_site_id", @@ -271,6 +274,7 @@ __all__ = [ "terminal", "get_datetime_data", + "get_timestamp", "get_formatted_current_time", "Logger", @@ -278,6 +282,7 @@ __all__ = [ "is_running_from_build", 
"is_staging_enabled", "is_dev_mode_enabled", + "is_in_tests", "requests_get", "requests_post" diff --git a/client/ayon_core/lib/applications.py b/client/ayon_core/lib/applications.py index febdaacdd1..8f1a1d10ea 100644 --- a/client/ayon_core/lib/applications.py +++ b/client/ayon_core/lib/applications.py @@ -13,15 +13,7 @@ import six from ayon_core import AYON_CORE_ROOT from ayon_core.client import get_asset_name_identifier -from ayon_core.settings import ( - get_system_settings, - get_project_settings, - get_local_settings -) -from ayon_core.settings.constants import ( - METADATA_KEYS, - M_DYNAMIC_KEY_LABEL -) +from ayon_core.settings import get_project_settings, get_studio_settings from .log import Logger from .profiles_filtering import filter_profiles from .local_settings import get_ayon_username @@ -230,29 +222,25 @@ class ApplicationGroup: self.manager = manager self._data = data - self.enabled = data.get("enabled", True) - self.label = data.get("label") or None - self.icon = data.get("icon") or None - self._environment = data.get("environment") or {} + self.enabled = data["enabled"] + self.label = data["label"] or None + self.icon = data["icon"] or None + env = {} + try: + env = json.loads(data["environment"]) + except Exception: + pass + self._environment = env - host_name = data.get("host_name", None) + host_name = data["host_name"] or None self.is_host = host_name is not None self.host_name = host_name - variants = data.get("variants") or {} - key_label_mapping = variants.pop(M_DYNAMIC_KEY_LABEL, {}) - for variant_name, variant_data in variants.items(): - if variant_name in METADATA_KEYS: - continue - - if "variant_label" not in variant_data: - variant_label = key_label_mapping.get(variant_name) - if variant_label: - variant_data["variant_label"] = variant_label - - variants[variant_name] = Application( - variant_name, variant_data, self - ) + settings_variants = data["variants"] + variants = {} + for variant_data in settings_variants: + app_variant = Application(variant_data, self) + variants[app_variant.name] = app_variant self.variants = variants @@ -274,62 +262,56 @@ class Application: Object by itself does nothing special. Args: - name (str): Specific version (or variant) of application. - e.g. "maya2020", "nuke11.3", etc. data (dict): Data for the version containing information about executables, variant label or if is enabled. Only required key is `executables`. group (ApplicationGroup): App group object that created the application and under which application belongs. 
+ """ - - def __init__(self, name, data, group): - self.name = name - self.group = group + def __init__(self, data, group): self._data = data - + name = data["name"] + label = data["label"] or name enabled = False if group.enabled: enabled = data.get("enabled", True) - self.enabled = enabled - self.use_python_2 = data.get("use_python_2", False) - - self.label = data.get("variant_label") or name - self.full_name = "/".join((group.name, name)) if group.label: - full_label = " ".join((group.label, self.label)) + full_label = " ".join((group.label, label)) else: - full_label = self.label - self.full_label = full_label - self._environment = data.get("environment") or {} + full_label = label + env = {} + try: + env = json.loads(data["environment"]) + except Exception: + pass - arguments = data.get("arguments") + arguments = data["arguments"] if isinstance(arguments, dict): arguments = arguments.get(platform.system().lower()) if not arguments: arguments = [] + + _executables = data["executables"].get(platform.system().lower(), []) + executables = [ + ApplicationExecutable(executable) + for executable in _executables + ] + + self.group = group + + self.name = name + self.label = label + self.enabled = enabled + self.use_python_2 = data.get("use_python_2", False) + + self.full_name = "/".join((group.name, name)) + self.full_label = full_label self.arguments = arguments - - if "executables" not in data: - self.executables = [ - UndefinedApplicationExecutable() - ] - return - - _executables = data["executables"] - if isinstance(_executables, dict): - _executables = _executables.get(platform.system().lower()) - - if not _executables: - _executables = [] - - executables = [] - for executable in _executables: - executables.append(ApplicationExecutable(executable)) - self.executables = executables + self._environment = env def __repr__(self): return "<{}> - {}".format(self.__class__.__name__, self.full_name) @@ -384,12 +366,12 @@ class ApplicationManager: """Load applications and tools and store them by their full name. Args: - system_settings (dict): Preloaded system settings. When passed manager + studio_settings (dict): Preloaded studio settings. When passed manager will always use these values. Gives ability to create manager using different settings. """ - def __init__(self, system_settings=None): + def __init__(self, studio_settings=None): self.log = Logger.get_logger(self.__class__.__name__) self.app_groups = {} @@ -397,16 +379,16 @@ class ApplicationManager: self.tool_groups = {} self.tools = {} - self._system_settings = system_settings + self._studio_settings = studio_settings self.refresh() - def set_system_settings(self, system_settings): + def set_studio_settings(self, studio_settings): """Ability to change init system settings. This will trigger refresh of manager. 
""" - self._system_settings = system_settings + self._studio_settings = studio_settings self.refresh() @@ -417,72 +399,37 @@ class ApplicationManager: self.tool_groups.clear() self.tools.clear() - if self._system_settings is not None: - settings = copy.deepcopy(self._system_settings) + if self._studio_settings is not None: + settings = copy.deepcopy(self._studio_settings) else: - settings = get_system_settings( + settings = get_studio_settings( clear_metadata=False, exclude_locals=False ) - all_app_defs = {} + applications_addon_settings = settings["applications"] + # Prepare known applications - app_defs = settings["applications"] - additional_apps = {} + app_defs = applications_addon_settings["applications"] + additional_apps = app_defs.pop("additional_apps") + for additional_app in additional_apps: + app_name = additional_app.pop("name") + if app_name in app_defs: + self.log.warning(( + "Additional application '{}' is already" + " in built-in applications." + ).format(app_name)) + app_defs[app_name] = additional_app + for group_name, variant_defs in app_defs.items(): - if group_name in METADATA_KEYS: - continue - - if group_name == "additional_apps": - additional_apps = variant_defs - else: - all_app_defs[group_name] = variant_defs - - # Prepare additional applications - # - First find dynamic keys that can be used as labels of group - dynamic_keys = {} - for group_name, variant_defs in additional_apps.items(): - if group_name == M_DYNAMIC_KEY_LABEL: - dynamic_keys = variant_defs - break - - # Add additional apps to known applications - for group_name, variant_defs in additional_apps.items(): - if group_name in METADATA_KEYS: - continue - - # Determine group label - label = variant_defs.get("label") - if not label: - # Look for label set in dynamic labels - label = dynamic_keys.get(group_name) - if not label: - label = group_name - variant_defs["label"] = label - - all_app_defs[group_name] = variant_defs - - for group_name, variant_defs in all_app_defs.items(): - if group_name in METADATA_KEYS: - continue - group = ApplicationGroup(group_name, variant_defs, self) self.app_groups[group_name] = group for app in group: self.applications[app.full_name] = app - tools_definitions = settings["tools"]["tool_groups"] - tool_label_mapping = tools_definitions.pop(M_DYNAMIC_KEY_LABEL, {}) - for tool_group_name, tool_group_data in tools_definitions.items(): - if not tool_group_name or tool_group_name in METADATA_KEYS: - continue - - tool_group_label = ( - tool_label_mapping.get(tool_group_name) or tool_group_name - ) - group = EnvironmentToolGroup( - tool_group_name, tool_group_label, tool_group_data, self - ) - self.tool_groups[tool_group_name] = group + tools_definitions = applications_addon_settings["tool_groups"] + for tool_group_data in tools_definitions: + group = EnvironmentToolGroup(tool_group_data, self) + self.tool_groups[group.name] = group for tool in group: self.tools[tool.full_name] = tool @@ -539,7 +486,7 @@ class ApplicationManager: """Launch procedure. For host application it's expected to contain "project_name", - "asset_name" and "task_name". + "folder_path" and "task_name". Args: app_name (str): Name of application that should be launched. @@ -571,30 +518,31 @@ class EnvironmentToolGroup: are same. Args: - name (str): Name of the tool group. - data (dict): Group's information with it's variants. + data (dict): Group information with variants. manager (ApplicationManager): Manager that creates the group. 
""" - def __init__(self, name, label, data, manager): + def __init__(self, data, manager): + name = data["name"] + label = data["label"] + self.name = name self.label = label self._data = data self.manager = manager - self._environment = data["environment"] - variants = data.get("variants") or {} - label_by_key = variants.pop(M_DYNAMIC_KEY_LABEL, {}) + environment = {} + try: + environment = json.loads(data["environment"]) + except Exception: + pass + self._environment = environment + + variants = data.get("variants") or [] variants_by_name = {} - for variant_name, variant_data in variants.items(): - if variant_name in METADATA_KEYS: - continue - - variant_label = label_by_key.get(variant_name) or variant_name - tool = EnvironmentTool( - variant_name, variant_label, variant_data, self - ) - variants_by_name[variant_name] = tool + for variant_data in variants: + tool = EnvironmentTool(variant_data, self) + variants_by_name[tool.name] = tool self.variants = variants_by_name def __repr__(self): @@ -615,23 +563,25 @@ class EnvironmentTool: Structure of tool information. Args: - name (str): Name of the tool. variant_data (dict): Variant data with environments and host and app variant filters. - group (str): Name of group which wraps tool. + group (EnvironmentToolGroup): Name of group which wraps tool. """ - def __init__(self, name, label, variant_data, group): + def __init__(self, variant_data, group): # Backwards compatibility 3.9.1 - 3.9.2 # - 'variant_data' contained only environments but contain also host # and application variant filters - host_names = variant_data.get("host_names", []) - app_variants = variant_data.get("app_variants", []) + name = variant_data["name"] + label = variant_data["label"] + host_names = variant_data["host_names"] + app_variants = variant_data["app_variants"] - if "environment" in variant_data: - environment = variant_data["environment"] - else: - environment = variant_data + environment = {} + try: + environment = json.loads(variant_data["environment"]) + except Exception: + pass self.host_names = host_names self.app_variants = app_variants @@ -1445,15 +1395,16 @@ class EnvironmentPrepData(dict): if data.get("env") is None: data["env"] = os.environ.copy() - if "system_settings" not in data: - data["system_settings"] = get_system_settings() + project_name = data["project_doc"]["name"] + if "project_settings" not in data: + data["project_settings"] = get_project_settings(project_name) super(EnvironmentPrepData, self).__init__(data) def get_app_environments_for_context( project_name, - asset_name, + folder_path, task_name, app_name, env_group=None, @@ -1464,7 +1415,7 @@ def get_app_environments_for_context( """Prepare environment variables by context. Args: project_name (str): Name of project. - asset_name (str): Name of asset. + folder_path (str): Folder path. task_name (str): Name of task. app_name (str): Name of application that is launched and can be found by ApplicationManager. 
@@ -1486,7 +1437,7 @@ def get_app_environments_for_context( context = app_manager.create_launch_context( app_name, project_name=project_name, - asset_name=asset_name, + folder_path=folder_path, task_name=task_name, env_group=env_group, launch_type=launch_type, @@ -1570,16 +1521,17 @@ def prepare_app_environments( # Use environments from local settings filtered_local_envs = {} - system_settings = data["system_settings"] - whitelist_envs = system_settings["general"].get("local_env_white_list") - if whitelist_envs: - local_settings = get_local_settings() - local_envs = local_settings.get("environments") or {} - filtered_local_envs = { - key: value - for key, value in local_envs.items() - if key in whitelist_envs - } + # NOTE Overrides for environment variables are not implemented in AYON. + # project_settings = data["project_settings"] + # whitelist_envs = project_settings["general"].get("local_env_white_list") + # if whitelist_envs: + # local_settings = get_local_settings() + # local_envs = local_settings.get("environments") or {} + # filtered_local_envs = { + # key: value + # for key, value in local_envs.items() + # if key in whitelist_envs + # } # Apply local environment variables for already existing values for key, value in filtered_local_envs.items(): @@ -1698,8 +1650,9 @@ def apply_project_environments_value( if project_settings is None: project_settings = get_project_settings(project_name) - env_value = project_settings["global"]["project_environments"] + env_value = project_settings["core"]["project_environments"] if env_value: + env_value = json.loads(env_value) parsed_value = parse_environments(env_value, env_group) env.update(acre.compute( _merge_env(parsed_value, env), @@ -1734,21 +1687,19 @@ def prepare_context_environments(data, env_group=None, addons_manager=None): # Load project specific environments project_name = project_doc["name"] project_settings = get_project_settings(project_name) - system_settings = get_system_settings() data["project_settings"] = project_settings - data["system_settings"] = system_settings app = data["app"] context_env = { - "AVALON_PROJECT": project_doc["name"], - "AVALON_APP_NAME": app.full_name + "AYON_PROJECT_NAME": project_doc["name"], + "AYON_APP_NAME": app.full_name } if asset_doc: asset_name = get_asset_name_identifier(asset_doc) - context_env["AVALON_ASSET"] = asset_name + context_env["AYON_FOLDER_PATH"] = asset_name if task_name: - context_env["AVALON_TASK"] = task_name + context_env["AYON_TASK_NAME"] = task_name log.debug( "Context environments set:\n{}".format( @@ -1766,7 +1717,7 @@ def prepare_context_environments(data, env_group=None, addons_manager=None): if not app.is_host: return - data["env"]["AVALON_APP"] = app.host_name + data["env"]["AYON_HOST_NAME"] = app.host_name if not asset_doc or not task_name: # QUESTION replace with log.info and skip workfile discovery? 
@@ -1776,7 +1727,7 @@ def prepare_context_environments(data, env_group=None, addons_manager=None): ) workdir_data = get_template_data( - project_doc, asset_doc, task_name, app.host_name, system_settings + project_doc, asset_doc, task_name, app.host_name, project_settings ) data["workdir_data"] = workdir_data @@ -1812,7 +1763,7 @@ def prepare_context_environments(data, env_group=None, addons_manager=None): "Couldn't create workdir because: {}".format(str(exc)) ) - data["env"]["AVALON_WORKDIR"] = workdir + data["env"]["AYON_WORKDIR"] = workdir _prepare_last_workfile(data, workdir, addons_manager) @@ -1929,7 +1880,7 @@ def _prepare_last_workfile(data, workdir, addons_manager): "Setting last workfile path: {}".format(last_workfile_path) ) - data["env"]["AVALON_LAST_WORKFILE"] = last_workfile_path + data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path data["last_workfile_path"] = last_workfile_path @@ -1958,7 +1909,7 @@ def should_start_last_workfile( project_settings = get_project_settings(project_name) profiles = ( project_settings - ["global"] + ["core"] ["tools"] ["Workfiles"] ["last_workfile_on_startup"] @@ -2008,7 +1959,7 @@ def should_workfile_tool_start( project_settings = get_project_settings(project_name) profiles = ( project_settings - ["global"] + ["core"] ["tools"] ["Workfiles"] ["open_workfile_tool_on_startup"] diff --git a/client/ayon_core/lib/ayon_info.py b/client/ayon_core/lib/ayon_info.py index 725e10fa0e..ec37d735d8 100644 --- a/client/ayon_core/lib/ayon_info.py +++ b/client/ayon_core/lib/ayon_info.py @@ -5,7 +5,6 @@ import platform import getpass import socket -from ayon_core.settings.lib import get_local_settings from .execute import get_ayon_launcher_args from .local_settings import get_local_site_id @@ -38,6 +37,16 @@ def is_staging_enabled(): return os.getenv("AYON_USE_STAGING") == "1" +def is_in_tests(): + """Process is running in automatic tests mode. + + Returns: + bool: True if running in tests. + + """ + return os.environ.get("AYON_IN_TESTS") == "1" + + def is_dev_mode_enabled(): """Dev mode is enabled in AYON. @@ -86,7 +95,6 @@ def get_all_current_info(): return { "workstation": get_workstation_info(), "env": os.environ.copy(), - "local_settings": get_local_settings(), "ayon": get_ayon_info(), } diff --git a/client/ayon_core/lib/execute.py b/client/ayon_core/lib/execute.py index 4e3257c3a0..e89c8f22ee 100644 --- a/client/ayon_core/lib/execute.py +++ b/client/ayon_core/lib/execute.py @@ -240,7 +240,7 @@ def run_detached_process(args, **kwargs): Args: - *args (tuple): AYON cli arguments. + args (Iterable[str]): AYON cli arguments. **kwargs (dict): Keyword arguments for subprocess.Popen. Returns: diff --git a/client/ayon_core/lib/local_settings.py b/client/ayon_core/lib/local_settings.py index 33b3232128..022f63a618 100644 --- a/client/ayon_core/lib/local_settings.py +++ b/client/ayon_core/lib/local_settings.py @@ -38,8 +38,8 @@ class AYONSecureRegistry: Registry should be used for private data that should be available only for user. - All passed registry names will have added prefix `OpenPype/` to easier - identify which data were created by OpenPype. + All passed registry names will have added prefix `AYON/` to easier + identify which data were created by AYON. Args: name(str): Name of registry used as identifier for data. 
diff --git a/client/ayon_core/lib/log.py b/client/ayon_core/lib/log.py index cbb1e41bae..36c39f9d84 100644 --- a/client/ayon_core/lib/log.py +++ b/client/ayon_core/lib/log.py @@ -257,7 +257,7 @@ class Logger: return cls._process_name # Get process name - process_name = os.environ.get("AVALON_APP_NAME") + process_name = os.environ.get("AYON_APP_NAME") if not process_name: try: import psutil diff --git a/client/ayon_core/lib/plugin_tools.py b/client/ayon_core/lib/plugin_tools.py index d204fc2c8f..5ad4da88b9 100644 --- a/client/ayon_core/lib/plugin_tools.py +++ b/client/ayon_core/lib/plugin_tools.py @@ -1,54 +1,131 @@ # -*- coding: utf-8 -*- -"""Avalon/Pyblish plugin tools.""" +"""AYON plugin tools.""" import os import logging import re +import collections log = logging.getLogger(__name__) +CAPITALIZE_REGEX = re.compile(r"[a-zA-Z0-9]") + + +def _capitalize_value(value): + """Capitalize first char of value. + + Function finds first available character or number in passed string + and uppers the character. + + Example: + >>> _capitalize_value("host") + 'Host' + >>> _capitalize_value("01_shot") + '01_shot' + >>> _capitalize_value("_shot") + '_Shot' + + Args: + value (str): Value where to capitalize first character. + """ + + # - conditions are because of possible index errors + # - regex is to skip symbols that are not chars or numbers + # - e.g. "{key}" which starts with curly bracket + capitalized = "" + for idx in range(len(value or "")): + char = value[idx] + if not CAPITALIZE_REGEX.match(char): + capitalized += char + else: + capitalized += char.upper() + capitalized += value[idx + 1:] + break + return capitalized + + +def _separate_keys_and_value(data): + valid_items = [] + hierachy_queue = collections.deque() + hierachy_queue.append((data, [])) + while hierachy_queue: + item = hierachy_queue.popleft() + src_data, keys = item + if src_data is None: + continue + + if isinstance(src_data, (list, tuple, set)): + for idx, item in enumerate(src_data): + hierachy_queue.append((item, keys + [idx])) + continue + + if isinstance(src_data, dict): + for key, value in src_data.items(): + hierachy_queue.append((value, keys + [key])) + continue + + if keys: + valid_items.append((keys, src_data)) + return valid_items + def prepare_template_data(fill_pairs): + """Prepares formatted data for filling template. + + It produces multiple variants of keys (key, Key, KEY) to control + format of filled template. + + Example: + >>> src_data = { + ... "host": "maya", + ... } + >>> output = prepare_template_data(src_data) + >>> sorted(list(output.items())) # sort & list conversion for tests + [('HOST', 'MAYA'), ('Host', 'Maya'), ('host', 'maya')] + + Args: + fill_pairs (Union[dict[str, Any], Iterable[Tuple[str, Any]]]): The + value that are prepared for template. + + Returns: + dict[str, str]: Prepared values for template. """ - Prepares formatted data for filling template. - It produces multiple variants of keys (key, Key, KEY) to control - format of filled template. 
+ valid_items = _separate_keys_and_value(fill_pairs) + output = {} + for item in valid_items: + keys, value = item + upper_value = value.upper() + capitalized_value = _capitalize_value(value) - Args: - fill_pairs (iterable) of tuples (key, value) - Returns: - (dict) - ('host', 'maya') > {'host':'maya', 'Host': 'Maya', 'HOST': 'MAYA'} - - """ - fill_data = {} - regex = re.compile(r"[a-zA-Z0-9]") - for key, value in dict(fill_pairs).items(): - # Handle cases when value is `None` (standalone publisher) - if value is None: + first_key = keys.pop(0) + if not keys: + output[first_key] = value + output[first_key.upper()] = upper_value + output[first_key.capitalize()] = capitalized_value continue - # Keep value as it is - fill_data[key] = value - # Both key and value are with upper case - fill_data[key.upper()] = value.upper() - # Capitalize only first char of value - # - conditions are because of possible index errors - # - regex is to skip symbols that are not chars or numbers - # - e.g. "{key}" which starts with curly bracket - capitalized = "" - for idx in range(len(value or "")): - char = value[idx] - if not regex.match(char): - capitalized += char + # Prepare 'normal', 'upper' and 'capitalized' variables + normal = output.setdefault(first_key, {}) + capitalized = output.setdefault(first_key.capitalize(), {}) + upper = output.setdefault(first_key.upper(), {}) + + keys_deque = collections.deque(keys) + while keys_deque: + key = keys_deque.popleft() + upper_key = key + if isinstance(key, str): + upper_key = key.upper() + + if not keys_deque: + # Fill value on last key + upper[upper_key] = upper_value + capitalized[key] = capitalized_value + normal[key] = value else: - capitalized += char.upper() - capitalized += value[idx + 1:] - break - - fill_data[key.capitalize()] = capitalized - - return fill_data + normal = normal.setdefault(key, {}) + capitalized = capitalized.setdefault(key, {}) + upper = upper.setdefault(upper_key, {}) + return output def source_hash(filepath, *args): diff --git a/client/ayon_core/lib/transcoding.py b/client/ayon_core/lib/transcoding.py index 6c6837dcf9..08e0bc9237 100644 --- a/client/ayon_core/lib/transcoding.py +++ b/client/ayon_core/lib/transcoding.py @@ -1385,23 +1385,26 @@ def _get_image_dimensions(application, input_path, log): def convert_color_values(application, color_value): """Get color mapping for ffmpeg and oiiotool. + Args: application (str): Application for which command should be created. - color_value (list[int]): List of 8bit int values for RGBA. + color_value (tuple[int, int, int, float]): List of 8bit int values + for RGBA. + Returns: str: ffmpeg returns hex string, oiiotool is string with floats. 
+ """ red, green, blue, alpha = color_value if application == "ffmpeg": return "{0:0>2X}{1:0>2X}{2:0>2X}@{3}".format( - red, green, blue, (alpha / 255.0) + red, green, blue, alpha ) elif application == "oiiotool": red = float(red / 255) green = float(green / 255) blue = float(blue / 255) - alpha = float(alpha / 255) return "{0:.3f},{1:.3f},{2:.3f},{3:.3f}".format( red, green, blue, alpha) diff --git a/client/ayon_core/modules/clockify/clockify_module.py b/client/ayon_core/modules/clockify/clockify_module.py index adb7eb66af..58407bfe94 100644 --- a/client/ayon_core/modules/clockify/clockify_module.py +++ b/client/ayon_core/modules/clockify/clockify_module.py @@ -2,22 +2,27 @@ import os import threading import time -from ayon_core.modules import OpenPypeModule, ITrayModule, IPluginPaths +from ayon_core.modules import AYONAddon, ITrayModule, IPluginPaths from ayon_core.client import get_asset_by_name from .constants import CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH -class ClockifyModule(OpenPypeModule, ITrayModule, IPluginPaths): +class ClockifyModule(AYONAddon, ITrayModule, IPluginPaths): name = "clockify" - def initialize(self, modules_settings): - clockify_settings = modules_settings[self.name] - self.enabled = clockify_settings["enabled"] - self.workspace_name = clockify_settings["workspace_name"] + def initialize(self, studio_settings): + enabled = self.name in studio_settings + workspace_name = None + if enabled: + clockify_settings = studio_settings[self.name] + workspace_name = clockify_settings["workspace_name"] - if self.enabled and not self.workspace_name: - raise Exception("Clockify Workspace is not set in settings.") + if enabled and workspace_name: + self.log.warning("Clockify Workspace is not set in settings.") + enabled = False + self.enabled = enabled + self.workspace_name = workspace_name self.timer_manager = None self.MessageWidgetClass = None diff --git a/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py b/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py index 19aa2ef195..f7dd1772b0 100644 --- a/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py +++ b/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py @@ -12,7 +12,7 @@ class ClockifyStart(LauncherAction): def is_compatible(self, session): """Return whether the action is compatible with the session""" - if "AVALON_TASK" in session: + if "AYON_TASK_NAME" in session: return True return False @@ -20,9 +20,9 @@ class ClockifyStart(LauncherAction): self.clockify_api.set_api() user_id = self.clockify_api.user_id workspace_id = self.clockify_api.workspace_id - project_name = session["AVALON_PROJECT"] - asset_name = session["AVALON_ASSET"] - task_name = session["AVALON_TASK"] + project_name = session["AYON_PROJECT_NAME"] + asset_name = session["AYON_FOLDER_PATH"] + task_name = session["AYON_TASK_NAME"] description = asset_name # fetch asset docs diff --git a/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py b/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py index 30f5ae698f..5ef9033ffe 100644 --- a/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py +++ b/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py @@ -36,7 +36,7 @@ class ClockifySync(LauncherAction): raise ClockifyPermissionsCheckFailed( "Current CLockify user is missing permissions for this action!" 
) - project_name = session.get("AVALON_PROJECT") or "" + project_name = session.get("AYON_PROJECT_NAME") or "" projects_to_sync = [] if project_name.strip(): diff --git a/client/ayon_core/modules/deadline/deadline_module.py b/client/ayon_core/modules/deadline/deadline_module.py index c98d04759e..97d346c287 100644 --- a/client/ayon_core/modules/deadline/deadline_module.py +++ b/client/ayon_core/modules/deadline/deadline_module.py @@ -4,7 +4,7 @@ import six import sys from ayon_core.lib import requests_get, Logger -from ayon_core.modules import OpenPypeModule, IPluginPaths +from ayon_core.modules import AYONAddon, IPluginPaths class DeadlineWebserviceError(Exception): @@ -13,28 +13,28 @@ class DeadlineWebserviceError(Exception): """ -class DeadlineModule(OpenPypeModule, IPluginPaths): +class DeadlineModule(AYONAddon, IPluginPaths): name = "deadline" - def __init__(self, manager, settings): - self.deadline_urls = {} - super(DeadlineModule, self).__init__(manager, settings) - - def initialize(self, modules_settings): + def initialize(self, studio_settings): # This module is always enabled - deadline_settings = modules_settings[self.name] - self.enabled = deadline_settings["enabled"] - deadline_url = deadline_settings.get("DEADLINE_REST_URL") - if deadline_url: - self.deadline_urls = {"default": deadline_url} - else: - self.deadline_urls = deadline_settings.get("deadline_urls") # noqa: E501 + deadline_urls = {} + enabled = self.name in studio_settings + if enabled: + deadline_settings = studio_settings[self.name] + deadline_urls = { + url_item["name"]: url_item["value"] + for url_item in deadline_settings["deadline_urls"] + } - if not self.deadline_urls: - self.enabled = False - self.log.warning(("default Deadline Webservice URL " - "not specified. Disabling module.")) - return + if enabled and not deadline_urls: + enabled = False + self.log.warning(( + "Deadline Webservice URLs are not specified. Disabling addon." + )) + + self.enabled = enabled + self.deadline_urls = deadline_urls def get_plugin_paths(self): """Deadline plugin paths.""" diff --git a/client/ayon_core/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/client/ayon_core/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py index 0cfe7c9b39..ea4b7a213e 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py +++ b/client/ayon_core/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py @@ -46,12 +46,12 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): from maya import cmds deadline_settings = ( render_instance.context.data - ["system_settings"] - ["modules"] + ["project_settings"] ["deadline"] ) default_server = render_instance.context.data["defaultDeadline"] + # QUESTION How and where is this is set? Should be removed? 
instance_server = render_instance.data.get("deadlineServers") if not instance_server: self.log.debug("Using default server.") @@ -64,7 +64,10 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): asString=True ) - default_servers = deadline_settings["deadline_urls"] + default_servers = { + url_item["name"]: url_item["value"] + for url_item in deadline_settings["deadline_urls"] + } project_servers = ( render_instance.context.data ["project_settings"] diff --git a/client/ayon_core/modules/deadline/plugins/publish/collect_pools.py b/client/ayon_core/modules/deadline/plugins/publish/collect_pools.py index 6c35012173..25951a56b6 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/collect_pools.py +++ b/client/ayon_core/modules/deadline/plugins/publish/collect_pools.py @@ -44,7 +44,7 @@ class CollectDeadlinePools(pyblish.api.InstancePlugin, secondary_pool = None @classmethod - def apply_settings(cls, project_settings, system_settings): + def apply_settings(cls, project_settings): # deadline.publish.CollectDeadlinePools settings = project_settings["deadline"]["publish"]["CollectDeadlinePools"] # noqa cls.primary_pool = settings.get("primary_pool", None) diff --git a/client/ayon_core/modules/deadline/plugins/publish/collect_publishable_instances.py b/client/ayon_core/modules/deadline/plugins/publish/collect_publishable_instances.py deleted file mode 100644 index 347da86360..0000000000 --- a/client/ayon_core/modules/deadline/plugins/publish/collect_publishable_instances.py +++ /dev/null @@ -1,39 +0,0 @@ -# -*- coding: utf-8 -*- -"""Collect instances that should be processed and published on DL. - -""" -import os - -import pyblish.api -from ayon_core.pipeline import PublishValidationError - - -class CollectDeadlinePublishableInstances(pyblish.api.InstancePlugin): - """Collect instances that should be processed and published on DL. - - Some long running publishes (not just renders) could be offloaded to DL, - this plugin compares theirs name against env variable, marks only - publishable by farm. - - Triggered only when running only in headless mode, eg on a farm. 
- """ - - order = pyblish.api.CollectorOrder + 0.499 - label = "Collect Deadline Publishable Instance" - targets = ["remote"] - - def process(self, instance): - self.log.debug("CollectDeadlinePublishableInstances") - publish_inst = os.environ.get("OPENPYPE_PUBLISH_SUBSET", '') - if not publish_inst: - raise PublishValidationError("OPENPYPE_PUBLISH_SUBSET env var " - "required for remote publishing") - - subset_name = instance.data["subset"] - if subset_name == publish_inst: - self.log.debug("Publish {}".format(subset_name)) - instance.data["publish"] = True - instance.data["farm"] = False - else: - self.log.debug("Skipping {}".format(subset_name)) - instance.data["publish"] = False diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index f7bc5529fb..a284464009 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -7,11 +7,10 @@ from datetime import datetime from ayon_core.lib import ( env_value_to_bool, collect_frames, + is_in_tests, ) -from ayon_core.pipeline import legacy_io from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo -from ayon_core.tests.lib import is_in_tests @attr.s @@ -81,16 +80,20 @@ class AfterEffectsSubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", + "AYON_PROJECT_NAME", + "AYON_FOLDER_PATH", + "AYON_TASK_NAME", + "AYON_WORKDIR", + "AYON_APP_NAME", "AYON_LOG_NO_COLORS", "IS_TEST" ] - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session) + environment = { + key: os.environ[key] + for key in keys + if key in os.environ + } for key in keys: value = environment.get(key) if value: @@ -109,7 +112,7 @@ class AfterEffectsSubmitDeadline( file_name, frame = list(collect_frames([render_path]).items())[0] if frame: # replace frame ('000001') with Deadline's required '[#######]' - # expects filename in format project_asset_subset_version.FRAME.ext + # expects filename in format project_folder_product_version.FRAME.ext render_dir = os.path.dirname(render_path) file_name = os.path.basename(render_path) hashed = '[{}]'.format(len(frame) * "#") diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py index c8b72ca52b..ae19e63a37 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py @@ -10,11 +10,10 @@ from ayon_core.lib import ( BoolDef, NumberDef, TextDef, + is_in_tests, ) -from ayon_core.pipeline import legacy_io from ayon_core.pipeline.publish import AYONPyblishPluginMixin from ayon_core.pipeline.farm.tools import iter_expected_files -from ayon_core.tests.lib import is_in_tests from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @@ -103,15 +102,19 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", + "AYON_PROJECT_NAME", + 
"AYON_FOLDER_PATH", + "AYON_TASK_NAME", + "AYON_WORKDIR", + "AYON_APP_NAME", "IS_TEST" ] - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session) + environment = { + key: os.environ[key] + for key in keys + if key in os.environ + } for key in keys: value = environment.get(key) diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_celaction_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_celaction_deadline.py index 47a0a25755..bc3636da63 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -75,7 +75,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): script_name = os.path.basename(script_path) for item in instance.context: - if "workfile" in item.data["family"]: + if "workfile" in item.data["productType"]: msg = "Workfile (scene) must be published along" assert item.data["publish"] is True, msg diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py index 77505eb623..837ed91c60 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py @@ -6,7 +6,6 @@ import requests import pyblish.api -from ayon_core.pipeline import legacy_io from ayon_core.pipeline.publish import ( AYONPyblishPluginMixin ) @@ -105,7 +104,7 @@ class FusionSubmitDeadline( # Collect all saver instances in context that are to be rendered saver_instances = [] for instance in context: - if instance.data["family"] != "render": + if instance.data["productType"] != "render": # Allow only saver family instances continue @@ -221,17 +220,21 @@ class FusionSubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", + "AYON_PROJECT_NAME", + "AYON_FOLDER_PATH", + "AYON_TASK_NAME", + "AYON_WORKDIR", + "AYON_APP_NAME", "AYON_LOG_NO_COLORS", "IS_TEST", "AYON_BUNDLE_NAME", ] - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session) + environment = { + key: os.environ[key] + for key in keys + if key in os.environ + } # to recognize render jobs environment["AYON_RENDER_JOB"] = "1" diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py index f2f1c90559..beb8afc3a3 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -10,10 +10,9 @@ from datetime import datetime import attr import pyblish.api -from ayon_core.pipeline import legacy_io from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo -from ayon_core.tests.lib import is_in_tests +from ayon_core.lib import is_in_tests class _ZipFile(ZipFile): @@ -274,16 +273,20 @@ class HarmonySubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", + "AYON_PROJECT_NAME", + "AYON_FOLDER_PATH", + "AYON_TASK_NAME", + "AYON_WORKDIR", + "AYON_APP_NAME", "AYON_LOG_NO_COLORS" "IS_TEST" ] - environment = dict({key: os.environ[key] for key 
in keys - if key in os.environ}, **legacy_io.Session) + environment = { + key: os.environ[key] + for key in keys + if key in os.environ + } for key in keys: value = environment.get(key) if value: diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_cache_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_cache_deadline.py index eed930e372..94e0947952 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_cache_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_cache_deadline.py @@ -7,12 +7,11 @@ import pyblish.api from ayon_core.lib import ( TextDef, NumberDef, + is_in_tests, ) from ayon_core.pipeline import ( - legacy_io, AYONPyblishPluginMixin ) -from ayon_core.tests.lib import is_in_tests from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @@ -99,15 +98,19 @@ class HoudiniCacheSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", + "AYON_PROJECT_NAME", + "AYON_FOLDER_PATH", + "AYON_TASK_NAME", + "AYON_WORKDIR", + "AYON_APP_NAME", "AYON_LOG_NO_COLORS", ] - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session) + environment = { + key: os.environ[key] + for key in keys + if key in os.environ + } for key in keys: value = environment.get(key) diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index 9988248957..486fdfd634 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -5,11 +5,11 @@ from datetime import datetime import pyblish.api -from ayon_core.pipeline import legacy_io, AYONPyblishPluginMixin -from ayon_core.tests.lib import is_in_tests +from ayon_core.pipeline import AYONPyblishPluginMixin from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo from ayon_core.lib import ( + is_in_tests, BoolDef, NumberDef ) @@ -44,7 +44,10 @@ class VrayRenderPluginInfo(): @attr.s class RedshiftRenderPluginInfo(): SceneFile = attr.ib(default=None) - Version = attr.ib(default=None) + # Use "1" as the default Redshift version just because it + # default fallback version in Deadline's Redshift plugin + # if no version was specified + Version = attr.ib(default="1") class HoudiniSubmitDeadline( @@ -142,7 +145,9 @@ class HoudiniSubmitDeadline( if split_render_job and not is_export_job: # Convert from family to Deadline plugin name # i.e., arnold_rop -> Arnold - plugin = instance.data["family"].replace("_rop", "").capitalize() + plugin = ( + instance.data["productType"].replace("_rop", "").capitalize() + ) else: plugin = "Houdini" if split_render_job: @@ -204,15 +209,19 @@ class HoudiniSubmitDeadline( "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", + "AYON_PROJECT_NAME", + "AYON_FOLDER_PATH", + "AYON_TASK_NAME", + "AYON_WORKDIR", + "AYON_APP_NAME", "AYON_LOG_NO_COLORS", ] - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session) + environment = { + 
key: os.environ[key] + for key in keys + if key in os.environ + } for key in keys: value = environment.get(key) @@ -245,21 +254,21 @@ class HoudiniSubmitDeadline( # Output driver to render if job_type == "render": - family = instance.data.get("family") - if family == "arnold_rop": + product_type = instance.data.get("productType") + if product_type == "arnold_rop": plugin_info = ArnoldRenderDeadlinePluginInfo( InputFile=instance.data["ifdFile"] ) - elif family == "mantra_rop": + elif product_type == "mantra_rop": plugin_info = MantraRenderDeadlinePluginInfo( SceneFile=instance.data["ifdFile"], Version=hou_major_minor, ) - elif family == "vray_rop": + elif product_type == "vray_rop": plugin_info = VrayRenderPluginInfo( InputFilename=instance.data["ifdFile"], ) - elif family == "redshift_rop": + elif product_type == "redshift_rop": plugin_info = RedshiftRenderPluginInfo( SceneFile=instance.data["ifdFile"] ) @@ -280,8 +289,8 @@ class HoudiniSubmitDeadline( else: self.log.error( - "Family '%s' not supported yet to split render job", - family + "Product type '%s' not supported yet to split render job", + product_type ) return else: diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py index 0a7c96008e..1abefa515a 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py @@ -9,7 +9,6 @@ from ayon_core.lib import ( NumberDef, ) from ayon_core.pipeline import ( - legacy_io, AYONPyblishPluginMixin ) from ayon_core.pipeline.publish.lib import ( @@ -49,7 +48,7 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, group = None @classmethod - def apply_settings(cls, project_settings, system_settings): + def apply_settings(cls, project_settings): settings = project_settings["deadline"]["publish"]["MaxSubmitDeadline"] # noqa # Take some defaults from settings @@ -107,15 +106,19 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", + "AYON_PROJECT_NAME", + "AYON_FOLDER_PATH", + "AYON_TASK_NAME", + "AYON_WORKDIR", + "AYON_APP_NAME", "IS_TEST" ] - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session) + environment = { + key: os.environ[key] + for key in keys + if key in os.environ + } for key in keys: value = environment.get(key) diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py index 84e6e93e6a..0e871eb90e 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -18,6 +18,7 @@ Attributes: from __future__ import print_function import os +import json import getpass import copy import re @@ -29,21 +30,20 @@ from collections import OrderedDict import attr from ayon_core.pipeline import ( - legacy_io, AYONPyblishPluginMixin ) from ayon_core.lib import ( BoolDef, NumberDef, TextDef, - EnumDef + EnumDef, + is_in_tests, ) from ayon_core.hosts.maya.api.lib_rendersettings import RenderSettings from ayon_core.hosts.maya.api.lib import get_attr_in_layer from openpype_modules.deadline import abstract_submit_deadline from 
openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo -from ayon_core.tests.lib import is_in_tests from ayon_core.pipeline.farm.tools import iter_expected_files @@ -116,7 +116,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, strict_error_checking = True @classmethod - def apply_settings(cls, project_settings, system_settings): + def apply_settings(cls, project_settings): settings = project_settings["deadline"]["publish"]["MayaSubmitDeadline"] # noqa # Take some defaults from settings @@ -131,8 +131,15 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, cls.group = settings.get("group", cls.group) cls.strict_error_checking = settings.get("strict_error_checking", cls.strict_error_checking) - cls.jobInfo = settings.get("jobInfo", cls.jobInfo) - cls.pluginInfo = settings.get("pluginInfo", cls.pluginInfo) + job_info = settings.get("jobInfo") + if job_info: + job_info = json.loads(job_info) + plugin_info = settings.get("pluginInfo") + if plugin_info: + plugin_info = json.loads(plugin_info) + + cls.jobInfo = job_info or cls.jobInfo + cls.pluginInfo = plugin_info or cls.pluginInfo def get_job_info(self): job_info = DeadlineJobInfo(Plugin="MayaBatch") @@ -200,15 +207,19 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", + "AYON_PROJECT_NAME", + "AYON_FOLDER_PATH", + "AYON_TASK_NAME", + "AYON_WORKDIR", + "AYON_APP_NAME", "IS_TEST" ] - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session) + environment = { + key: os.environ[key] + for key in keys + if key in os.environ + } for key in keys: value = environment.get(key) @@ -248,7 +259,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, default_rs_include_lights = ( instance.context.data['project_settings'] ['maya'] - ['RenderSettings'] + ['render_settings'] ['enable_all_lights'] ) diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py deleted file mode 100644 index 02338c5c32..0000000000 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py +++ /dev/null @@ -1,129 +0,0 @@ -import os -import attr -from datetime import datetime - -from ayon_core.pipeline import legacy_io, PublishXmlValidationError -from ayon_core.tests.lib import is_in_tests -from openpype_modules.deadline import abstract_submit_deadline -from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo - -import pyblish.api - - -@attr.s -class MayaPluginInfo(object): - Build = attr.ib(default=None) # Don't force build - StrictErrorChecking = attr.ib(default=True) - - SceneFile = attr.ib(default=None) # Input scene - Version = attr.ib(default=None) # Mandatory for Deadline - ProjectPath = attr.ib(default=None) - - ScriptJob = attr.ib(default=True) - ScriptFilename = attr.ib(default=None) - - -class MayaSubmitRemotePublishDeadline( - abstract_submit_deadline.AbstractSubmitDeadline): - """Submit Maya scene to perform a local publish in Deadline. - - Publishing in Deadline can be helpful for scenes that publish very slow. - This way it can process in the background on another machine without the - Artist having to wait for the publish to finish on their local machine. 
- - Submission is done through the Deadline Web Service. DL then triggers - `openpype/scripts/remote_publish.py`. - - Each publishable instance creates its own full publish job. - - Different from `ProcessSubmittedJobOnFarm` which creates publish job - depending on metadata json containing context and instance data of - rendered files. - """ - - label = "Submit Scene to Deadline" - order = pyblish.api.IntegratorOrder - hosts = ["maya"] - families = ["publish.farm"] - targets = ["local"] - - def process(self, instance): - - # Ensure no errors so far - if not (all(result["success"] - for result in instance.context.data["results"])): - raise PublishXmlValidationError("Publish process has errors") - - if not instance.data["publish"]: - self.log.warning("No active instances found. " - "Skipping submission..") - return - - super(MayaSubmitRemotePublishDeadline, self).process(instance) - - def get_job_info(self): - instance = self._instance - context = instance.context - - project_name = instance.context.data["projectName"] - scene = instance.context.data["currentFile"] - scenename = os.path.basename(scene) - - job_name = "{scene} [PUBLISH]".format(scene=scenename) - batch_name = "{code} - {scene}".format(code=project_name, - scene=scenename) - - if is_in_tests(): - batch_name += datetime.now().strftime("%d%m%Y%H%M%S") - - job_info = DeadlineJobInfo(Plugin="MayaBatch") - job_info.BatchName = batch_name - job_info.Name = job_name - job_info.UserName = context.data.get("user") - job_info.Comment = context.data.get("comment", "") - - # use setting for publish job on farm, no reason to have it separately - project_settings = context.data["project_settings"] - deadline_publish_job_sett = project_settings["deadline"]["publish"]["ProcessSubmittedJobOnFarm"] # noqa - job_info.Department = deadline_publish_job_sett["deadline_department"] - job_info.ChunkSize = deadline_publish_job_sett["deadline_chunk_size"] - job_info.Priority = deadline_publish_job_sett["deadline_priority"] - job_info.Group = deadline_publish_job_sett["deadline_group"] - job_info.Pool = deadline_publish_job_sett["deadline_pool"] - - # Include critical environment variables with submission + Session - keys = [ - "FTRACK_API_USER", - "FTRACK_API_KEY", - "FTRACK_SERVER" - ] - - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session) - - # TODO replace legacy_io with context.data - environment["AVALON_PROJECT"] = project_name - environment["AVALON_ASSET"] = instance.context.data["asset"] - environment["AVALON_TASK"] = instance.context.data["task"] - environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME") - environment["OPENPYPE_PUBLISH_SUBSET"] = instance.data["subset"] - environment["AYON_LOG_NO_COLORS"] = "1" - environment["AYON_USERNAME"] = instance.context.data["user"] - environment["AYON_REMOTE_PUBLISH"] = "1" - - for key, value in environment.items(): - job_info.EnvironmentKeyValue[key] = value - - def get_plugin_info(self): - # Not all hosts can import this module. 
- from maya import cmds - scene = self._instance.context.data["currentFile"] - - plugin_info = MayaPluginInfo() - plugin_info.SceneFile = scene - plugin_info.ScriptFilename = "{OPENPYPE_REPOS_ROOT}/openpype/scripts/remote_publish.py" # noqa - plugin_info.Version = cmds.about(version=True) - plugin_info.ProjectPath = cmds.workspace(query=True, - rootDirectory=True) - - return attr.asdict(plugin_info) diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py index 9fff8edee6..a3111454b3 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -7,12 +7,11 @@ from datetime import datetime import requests import pyblish.api -from ayon_core.pipeline import legacy_io from ayon_core.pipeline.publish import ( AYONPyblishPluginMixin ) -from ayon_core.tests.lib import is_in_tests from ayon_core.lib import ( + is_in_tests, BoolDef, NumberDef ) @@ -40,10 +39,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, concurrent_tasks = 1 group = "" department = "" - limit_groups = {} + limit_groups = [] use_gpu = False env_allowed_keys = [] - env_search_replace_values = {} + env_search_replace_values = [] workfile_dependency = True use_published_workfile = True @@ -184,11 +183,13 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, b_job_response.json()["_id"]) # redefinition of families - if "render" in instance.data["family"]: - instance.data['family'] = 'write' + if "render" in instance.data["productType"]: + instance.data["family"] = "write" + instance.data["productType"] = "write" families.insert(0, "render2d") - elif "prerender" in instance.data["family"]: - instance.data['family'] = 'write' + elif "prerender" in instance.data["productType"]: + instance.data["family"] = "write" + instance.data["productType"] = "write" families.insert(0, "prerender") instance.data["families"] = families @@ -197,7 +198,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, AbstractSubmitDeadline""" for instance in context: if ( - instance.data["family"] != "workfile" + instance.data["productType"] != "workfile" # Disabled instances won't be integrated or instance.data("publish") is False ): @@ -374,10 +375,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, keys = [ "PYTHONPATH", "PATH", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", + "AYON_PROJECT_NAME", + "AYON_FOLDER_PATH", + "AYON_TASK_NAME", + "AYON_APP_NAME", "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", @@ -393,8 +394,11 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, if self.env_allowed_keys: keys += self.env_allowed_keys - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session) + environment = { + key: os.environ[key] + for key in keys + if key in os.environ + } # to recognize render jobs environment["AYON_RENDER_JOB"] = "1" @@ -402,8 +406,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, # finally search replace in values of any key if self.env_search_replace_values: for key, value in environment.items(): - for _k, _v in self.env_search_replace_values.items(): - environment[key] = value.replace(_k, _v) + for item in self.env_search_replace_values: + environment[key] = value.replace( + item["name"], item["value"] + ) payload["JobInfo"].update({ "EnvironmentKeyValue%d" % index: "{key}={value}".format( @@ -539,8 +545,10 @@ class 
NukeSubmitDeadline(pyblish.api.InstancePlugin, import nuke captured_groups = [] - for lg_name, list_node_class in self.limit_groups.items(): - for node_class in list_node_class: + for limit_group in self.limit_groups: + lg_name = limit_group["name"] + + for node_class in limit_group["value"]: for node in nuke.allNodes(recurseGroups=True): # ignore all nodes not member of defined class if node.Class() not in node_class: diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py index a90397baa2..3e95049e56 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py @@ -11,15 +11,14 @@ import pyblish.api from ayon_core.client import ( get_last_version_by_subset_name, ) -from ayon_core.pipeline import publish, legacy_io -from ayon_core.lib import EnumDef, is_running_from_build -from ayon_core.tests.lib import is_in_tests +from ayon_core.pipeline import publish +from ayon_core.lib import EnumDef, is_in_tests from ayon_core.pipeline.version_start import get_versioning_start from ayon_core.pipeline.farm.pyblish_functions import ( create_skeleton_instance_cache, create_instances_for_cache, - attach_instances_to_subset, + attach_instances_to_product, prepare_cache_representations, create_metadata_path ) @@ -68,7 +67,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, "FTRACK_API_USER", "FTRACK_API_KEY", "FTRACK_SERVER", - "AVALON_APP_NAME", + "AYON_APP_NAME", "AYON_USERNAME", "OPENPYPE_SG_USER", "KITSU_LOGIN", @@ -98,12 +97,12 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, (str): deadline_publish_job_id """ data = instance.data.copy() - subset = data["subset"] - job_name = "Publish - {subset}".format(subset=subset) + product_name = data["productName"] + job_name = "Publish - {}".format(product_name) anatomy = instance.context.data['anatomy'] - # instance.data.get("subset") != instances[0]["subset"] + # instance.data.get("productName") != instances[0]["productName"] # 'Main' vs 'renderMain' override_version = None instance_version = instance.data.get("version") # take this if exists @@ -113,10 +112,10 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, output_dir = self._get_publish_folder( anatomy, deepcopy(instance.data["anatomyData"]), - instance.data.get("asset"), - instance.data["subset"], + instance.data.get("folderPath"), + instance.data["productName"], instance.context, - instance.data["family"], + instance.data["productType"], override_version ) @@ -126,9 +125,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, create_metadata_path(instance, anatomy) environment = { - "AVALON_PROJECT": instance.context.data["projectName"], - "AVALON_ASSET": instance.context.data["asset"], - "AVALON_TASK": instance.context.data["task"], + "AYON_PROJECT_NAME": instance.context.data["projectName"], + "AYON_FOLDER_PATH": instance.context.data["folderPath"], + "AYON_TASK_NAME": instance.context.data["task"], "AYON_USERNAME": instance.context.data["user"], "AYON_LOG_NO_COLORS": "1", "IS_TEST": str(int(is_in_tests())), @@ -260,7 +259,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, } ] - This will create instances for `beauty` and `Z` subset + This will create instances for `beauty` and `Z` product adding those files to their respective representations. 
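The environment hunks above replace the legacy `AVALON_*` session keys with `AYON_*` names for farm jobs. The `GlobalJobPreLoad` plugin further down in this diff still accepts the old names as a fallback; the sketch below is not part of the patch and only illustrates that lookup order for a plain environment mapping.

```python
# Sketch only: backwards compatible AVALON_* -> AYON_* resolution, modeled on
# the fallback loop GlobalJobPreLoad adds later in this diff.
import os

_KEY_FALLBACKS = {
    "AYON_PROJECT_NAME": "AVALON_PROJECT",
    "AYON_FOLDER_PATH": "AVALON_ASSET",
    "AYON_TASK_NAME": "AVALON_TASK",
    "AYON_APP_NAME": "AVALON_APP_NAME",
}


def resolve_job_env(env=None):
    """Return new-style keys, falling back to the legacy names if needed."""
    env = os.environ if env is None else env
    resolved = {}
    for new_key, old_key in _KEY_FALLBACKS.items():
        value = env.get(new_key) or env.get(old_key) or ""
        if value:
            resolved[new_key] = value
    return resolved


if __name__ == "__main__":
    legacy_only = {"AVALON_PROJECT": "demo_Commercial"}
    print(resolve_job_env(legacy_only))
    # {'AYON_PROJECT_NAME': 'demo_Commercial'}
```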
If we have only list of files, we collect all file sequences. @@ -298,9 +297,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, instance_skeleton_data["representations"] += representations instances = [instance_skeleton_data] - # attach instances to subset + # attach instances to product if instance.data.get("attachTo"): - instances = attach_instances_to_subset( + instances = attach_instances_to_product( instance.data.get("attachTo"), instances ) @@ -360,7 +359,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, # publish job file publish_job = { - "asset": instance_skeleton_data["asset"], + "folderPath": instance_skeleton_data["folderPath"], "frameStart": instance_skeleton_data["frameStart"], "frameEnd": instance_skeleton_data["frameEnd"], "fps": instance_skeleton_data["fps"], @@ -370,7 +369,6 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, "intent": instance.context.data.get("intent"), "comment": instance.context.data.get("comment"), "job": render_job or None, - "session": legacy_io.Session.copy(), "instances": instances } @@ -384,23 +382,24 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, json.dump(publish_job, f, indent=4, sort_keys=True) def _get_publish_folder(self, anatomy, template_data, - asset, subset, context, - family, version=None): + asset, product_name, context, + product_type, version=None): """ Extracted logic to pre-calculate real publish folder, which is calculated in IntegrateNew inside of Deadline process. This should match logic in: 'collect_anatomy_instance_data' - to - get correct anatomy, family, version for subset and + get correct anatomy, family, version for product and 'collect_resources_path' get publish_path Args: anatomy (ayon_core.pipeline.anatomy.Anatomy): template_data (dict): pre-calculated collected data for process - asset (string): asset name - subset (string): subset name (actually group name of subset) - family (string): for current deadline process it's always 'render' + asset (str): asset name + product_name (str): Product name (actually group name of product). 
+ product_type (str): for current deadline process it's always + 'render' TODO - for generic use family needs to be dynamically calculated like IntegrateNew does version (int): override version from instance if exists @@ -415,7 +414,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, if not version: version = get_last_version_by_subset_name( project_name, - subset, + product_name, asset_name=asset ) if version: @@ -426,8 +425,8 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, template_data["app"], task_name=template_data["task"]["name"], task_type=template_data["task"]["type"], - family="render", - subset=subset, + product_type="render", + product_name=product_name, project_settings=context.data["project_settings"] ) @@ -437,14 +436,18 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, template_name = publish.get_publish_template_name( project_name, host_name, - family, + product_type, task_info.get("name"), task_info.get("type"), ) - template_data["subset"] = subset - template_data["family"] = family + template_data["subset"] = product_name + template_data["family"] = product_type template_data["version"] = version + template_data["product"] = { + "name": product_name, + "type": product_type, + } render_templates = anatomy.templates_obj[template_name] if "folder" in render_templates: diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py index bd343e103a..7bc13ae4b6 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py @@ -12,15 +12,14 @@ import pyblish.api from ayon_core.client import ( get_last_version_by_subset_name, ) -from ayon_core.pipeline import publish, legacy_io -from ayon_core.lib import EnumDef, is_running_from_build -from ayon_core.tests.lib import is_in_tests +from ayon_core.pipeline import publish +from ayon_core.lib import EnumDef, is_in_tests from ayon_core.pipeline.version_start import get_versioning_start from ayon_core.pipeline.farm.pyblish_functions import ( create_skeleton_instance, create_instances_for_aov, - attach_instances_to_subset, + attach_instances_to_product, prepare_representations, create_metadata_path ) @@ -99,18 +98,39 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, "karma_rop", "vray_rop", "redshift_rop"] - aov_filter = {"maya": [r".*([Bb]eauty).*"], - "blender": [r".*([Bb]eauty).*"], - "aftereffects": [r".*"], # for everything from AE - "harmony": [r".*"], # for everything from AE - "celaction": [r".*"], - "max": [r".*"]} + aov_filter = [ + { + "name": "maya", + "value": [r".*([Bb]eauty).*"] + }, + { + "name": "blender", + "value": [r".*([Bb]eauty).*"] + }, + { + # for everything from AE + "name": "aftereffects", + "value": [r".*"] + }, + { + "name": "harmony", + "value": [r".*"] + }, + { + "name": "celaction", + "value": [r".*"] + }, + { + "name": "max", + "value": [r".*"] + }, + ] environ_keys = [ "FTRACK_API_USER", "FTRACK_API_KEY", "FTRACK_SERVER", - "AVALON_APP_NAME", + "AYON_APP_NAME", "AYON_USERNAME", "OPENPYPE_SG_USER", "KITSU_LOGIN", @@ -154,12 +174,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, (str): deadline_publish_job_id """ data = instance.data.copy() - subset = data["subset"] - job_name = "Publish - {subset}".format(subset=subset) + product_name = data["productName"] + job_name = "Publish - {}".format(product_name) anatomy = 
instance.context.data['anatomy'] - # instance.data.get("subset") != instances[0]["subset"] + # instance.data.get("productName") != instances[0]["productName"] # 'Main' vs 'renderMain' override_version = None instance_version = instance.data.get("version") # take this if exists @@ -169,10 +189,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, output_dir = self._get_publish_folder( anatomy, deepcopy(instance.data["anatomyData"]), - instance.data.get("asset"), - instances[0]["subset"], + instance.data.get("folderPath"), + instances[0]["productName"], instance.context, - instances[0]["family"], + instances[0]["productType"], override_version ) @@ -182,9 +202,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, create_metadata_path(instance, anatomy) environment = { - "AVALON_PROJECT": instance.context.data["projectName"], - "AVALON_ASSET": instance.context.data["asset"], - "AVALON_TASK": instance.context.data["task"], + "AYON_PROJECT_NAME": instance.context.data["projectName"], + "AYON_FOLDER_PATH": instance.context.data["folderPath"], + "AYON_TASK_NAME": instance.context.data["task"], "AYON_USERNAME": instance.context.data["user"], "AYON_LOG_NO_COLORS": "1", "IS_TEST": str(int(is_in_tests())), @@ -310,151 +330,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, self.log.debug("Skipping local instance.") return - data = instance.data.copy() - context = instance.context - self.context = context - self.anatomy = instance.context.data["anatomy"] - - asset = data.get("asset") or context.data["asset"] - subset = data.get("subset") - - start = instance.data.get("frameStart") - if start is None: - start = context.data["frameStart"] - - end = instance.data.get("frameEnd") - if end is None: - end = context.data["frameEnd"] - - handle_start = instance.data.get("handleStart") - if handle_start is None: - handle_start = context.data["handleStart"] - - handle_end = instance.data.get("handleEnd") - if handle_end is None: - handle_end = context.data["handleEnd"] - - fps = instance.data.get("fps") - if fps is None: - fps = context.data["fps"] - - if data.get("extendFrames", False): - start, end = self._extend_frames( - asset, - subset, - start, - end, - data["overrideExistingFrame"]) - - try: - source = data["source"] - except KeyError: - source = context.data["currentFile"] - - success, rootless_path = ( - self.anatomy.find_root_template_from_path(source) - ) - if success: - source = rootless_path - - else: - # `rootless_path` is not set to `source` if none of roots match - self.log.warning(( - "Could not find root path for remapping \"{}\"." - " This may cause issues." 
- ).format(source)) - - family = "render" - if ("prerender" in instance.data["families"] or - "prerender.farm" in instance.data["families"]): - family = "prerender" - families = [family] - - # pass review to families if marked as review - do_not_add_review = False - if data.get("review"): - families.append("review") - elif data.get("review") is False: - self.log.debug("Instance has review explicitly disabled.") - do_not_add_review = True - - instance_skeleton_data = { - "family": family, - "subset": subset, - "families": families, - "asset": asset, - "frameStart": start, - "frameEnd": end, - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStartHandle": start - handle_start, - "frameEndHandle": end + handle_end, - "comment": instance.data["comment"], - "fps": fps, - "source": source, - "extendFrames": data.get("extendFrames"), - "overrideExistingFrame": data.get("overrideExistingFrame"), - "pixelAspect": data.get("pixelAspect", 1), - "resolutionWidth": data.get("resolutionWidth", 1920), - "resolutionHeight": data.get("resolutionHeight", 1080), - "multipartExr": data.get("multipartExr", False), - "jobBatchName": data.get("jobBatchName", ""), - "useSequenceForReview": data.get("useSequenceForReview", True), - # map inputVersions `ObjectId` -> `str` so json supports it - "inputVersions": list(map(str, data.get("inputVersions", []))), - "colorspace": instance.data.get("colorspace"), - "stagingDir_persistent": instance.data.get( - "stagingDir_persistent", False - ) - } - - # skip locking version if we are creating v01 - instance_version = instance.data.get("version") # take this if exists - if instance_version != 1: - instance_skeleton_data["version"] = instance_version - - # transfer specific families from original instance to new render - for item in self.families_transfer: - if item in instance.data.get("families", []): - instance_skeleton_data["families"] += [item] - - # transfer specific properties from original instance based on - # mapping dictionary `instance_transfer` - for key, values in self.instance_transfer.items(): - if key in instance.data.get("families", []): - for v in values: - instance_skeleton_data[v] = instance.data.get(v) - - # look into instance data if representations are not having any - # which are having tag `publish_on_farm` and include them - for repre in instance.data.get("representations", []): - staging_dir = repre.get("stagingDir") - if staging_dir: - success, rootless_staging_dir = ( - self.anatomy.find_root_template_from_path( - staging_dir - ) - ) - if success: - repre["stagingDir"] = rootless_staging_dir - else: - self.log.warning(( - "Could not find root path for remapping \"{}\"." - " This may cause issues on farm." - ).format(staging_dir)) - repre["stagingDir"] = staging_dir - - if "publish_on_farm" in repre.get("tags"): - # create representations attribute of not there - if "representations" not in instance_skeleton_data.keys(): - instance_skeleton_data["representations"] = [] - - instance_skeleton_data["representations"].append(repre) - - instances = None - assert data.get("expectedFiles"), ("Submission from old Pype version" - " - missing expectedFiles") - anatomy = instance.context.data["anatomy"] instance_skeleton_data = create_skeleton_instance( @@ -481,7 +356,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, } ] - This will create instances for `beauty` and `Z` subset + This will create instances for `beauty` and `Z` product adding those files to their respective representations. 
If we have only list of files, we collect all file sequences. @@ -506,17 +381,23 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, self.log.debug("Instance has review explicitly disabled.") do_not_add_review = True + aov_filter = { + item["name"]: item["value"] + for item in self.aov_filter + } if isinstance(instance.data.get("expectedFiles")[0], dict): instances = create_instances_for_aov( instance, instance_skeleton_data, - self.aov_filter, self.skip_integration_repre_list, - do_not_add_review) + aov_filter, + self.skip_integration_repre_list, + do_not_add_review + ) else: representations = prepare_representations( instance_skeleton_data, instance.data.get("expectedFiles"), anatomy, - self.aov_filter, + aov_filter, self.skip_integration_repre_list, do_not_add_review, instance.context, @@ -530,9 +411,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, instance_skeleton_data["representations"] += representations instances = [instance_skeleton_data] - # attach instances to subset + # attach instances to product if instance.data.get("attachTo"): - instances = attach_instances_to_subset( + instances = attach_instances_to_product( instance.data.get("attachTo"), instances ) @@ -594,7 +475,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, # publish job file publish_job = { - "asset": instance_skeleton_data["asset"], + "folderPath": instance_skeleton_data["folderPath"], "frameStart": instance_skeleton_data["frameStart"], "frameEnd": instance_skeleton_data["frameEnd"], "fps": instance_skeleton_data["fps"], @@ -604,7 +485,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, "intent": instance.context.data.get("intent"), "comment": instance.context.data.get("comment"), "job": render_job or None, - "session": legacy_io.Session.copy(), "instances": instances } @@ -623,14 +503,14 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, json.dump(publish_job, f, indent=4, sort_keys=True) def _get_publish_folder(self, anatomy, template_data, - asset, subset, context, - family, version=None): + asset, product_name, context, + product_type, version=None): """ Extracted logic to pre-calculate real publish folder, which is calculated in IntegrateNew inside of Deadline process. 
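The `aov_filter` hunks above convert the per-host regex patterns from the new list-based settings into the dict shape passed to `create_instances_for_aov` and `prepare_representations`. As a toy illustration only (the real selection logic lives in those helpers, which are not shown in this diff), such patterns can be used to pick which AOV names receive a review:

```python
# Toy illustration, not the actual create_instances_for_aov logic: per-host
# regex patterns, as converted from the list-based setting above, decide
# which AOV names should get a review representation.
import re

aov_filter = {
    "maya": [r".*([Bb]eauty).*"],
    "aftereffects": [r".*"],
}


def reviewable_aovs(host_name, aov_names, patterns_by_host):
    patterns = patterns_by_host.get(host_name, [])
    return [
        aov for aov in aov_names
        if any(re.match(pattern, aov) for pattern in patterns)
    ]


if __name__ == "__main__":
    print(reviewable_aovs("maya", ["beauty", "Z", "crypto"], aov_filter))
    # ['beauty']
```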
This should match logic in: 'collect_anatomy_instance_data' - to - get correct anatomy, family, version for subset and + get correct anatomy, family, version for product name and 'collect_resources_path' get publish_path @@ -638,8 +518,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, anatomy (ayon_core.pipeline.anatomy.Anatomy): template_data (dict): pre-calculated collected data for process asset (string): asset name - subset (string): subset name (actually group name of subset) - family (string): for current deadline process it's always 'render' + product_name (string): Product name (actually group name + of product) + product_type (string): for current deadline process it's always + 'render' TODO - for generic use family needs to be dynamically calculated like IntegrateNew does version (int): override version from instance if exists @@ -655,7 +537,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, if not version: version = get_last_version_by_subset_name( project_name, - subset, + product_name, asset_name=asset ) if version: @@ -666,8 +548,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, host_name, task_name=template_data["task"]["name"], task_type=template_data["task"]["type"], - family="render", - subset=subset, + product_type="render", + product_name=product_name, project_settings=context.data["project_settings"] ) @@ -677,14 +559,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, template_name = publish.get_publish_template_name( project_name, host_name, - family, + product_type, task_info.get("name"), task_info.get("type"), ) - template_data["subset"] = subset - template_data["family"] = family template_data["version"] = version + template_data["subset"] = product_name + template_data["family"] = product_type + template_data["product"] = { + "name": product_name, + "type": product_type, + } render_templates = anatomy.templates_obj[template_name] if "folder" in render_templates: diff --git a/client/ayon_core/modules/deadline/repository/custom/plugins/Ayon/Ayon.py b/client/ayon_core/modules/deadline/repository/custom/plugins/Ayon/Ayon.py index a1f752605d..de0a2c6d7a 100644 --- a/client/ayon_core/modules/deadline/repository/custom/plugins/Ayon/Ayon.py +++ b/client/ayon_core/modules/deadline/repository/custom/plugins/Ayon/Ayon.py @@ -15,6 +15,7 @@ import re import os import platform +__version__ = "1.0.0" ###################################################################### # This is the function that Deadline calls to get an instance of the @@ -52,6 +53,9 @@ class AyonDeadlinePlugin(DeadlinePlugin): del self.RenderArgumentCallback def InitializeProcess(self): + self.LogInfo( + "Initializing process with AYON plugin {}".format(__version__) + ) self.PluginType = PluginType.Simple self.StdoutHandling = True diff --git a/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 81aab00b93..1565b2c496 100644 --- a/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -14,7 +14,7 @@ from Deadline.Scripting import ( DirectoryUtils, ProcessUtils, ) - +__version__ = "1.0.1" VERSION_REGEX = re.compile( r"(?P0|[1-9]\d*)" r"\.(?P0|[1-9]\d*)" @@ -471,12 +471,21 @@ def inject_ayon_environment(deadlinePlugin): ] add_kwargs = { - "project": job.GetJobEnvironmentKeyValue("AVALON_PROJECT"), - "asset": 
job.GetJobEnvironmentKeyValue("AVALON_ASSET"), - "task": job.GetJobEnvironmentKeyValue("AVALON_TASK"), - "app": job.GetJobEnvironmentKeyValue("AVALON_APP_NAME"), "envgroup": "farm", } + # Support backwards compatible keys + for key, env_keys in ( + ("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]), + ("asset", ["AYON_FOLDER_PATH", "AVALON_ASSET"]), + ("task", ["AYON_TASK_NAME", "AVALON_TASK"]), + ("app", ["AYON_APP_NAME", "AVALON_APP_NAME"]), + ): + value = "" + for env_key in env_keys: + value = job.GetJobEnvironmentKeyValue(env_key) + if value: + break + add_kwargs[key] = value if job.GetJobEnvironmentKeyValue("IS_TEST"): args.append("--automatic-tests") @@ -486,8 +495,8 @@ def inject_ayon_environment(deadlinePlugin): args.extend(["--{}".format(key), value]) else: raise RuntimeError(( - "Missing required env vars: AVALON_PROJECT, AVALON_ASSET," - " AVALON_TASK, AVALON_APP_NAME" + "Missing required env vars: AYON_PROJECT_NAME," + " AYON_FOLDER_PATH, AYON_TASK_NAME, AYON_APP_NAME" )) environment = { @@ -593,7 +602,7 @@ def inject_render_job_id(deadlinePlugin): def __main__(deadlinePlugin): - print("*** GlobalJobPreload start ...") + print("*** GlobalJobPreload {} start ...".format(__version__)) print(">>> Getting job ...") job = deadlinePlugin.GetJob() diff --git a/client/ayon_core/modules/job_queue/__init__.py b/client/ayon_core/modules/job_queue/__init__.py index 6f2cec1b97..0a4c62abfb 100644 --- a/client/ayon_core/modules/job_queue/__init__.py +++ b/client/ayon_core/modules/job_queue/__init__.py @@ -1,6 +1,6 @@ -from .module import JobQueueModule +from .addon import JobQueueAddon __all__ = ( - "JobQueueModule", + "JobQueueAddon", ) diff --git a/client/ayon_core/modules/job_queue/module.py b/client/ayon_core/modules/job_queue/addon.py similarity index 88% rename from client/ayon_core/modules/job_queue/module.py rename to client/ayon_core/modules/job_queue/addon.py index 1cecd62de5..32d06d0040 100644 --- a/client/ayon_core/modules/job_queue/module.py +++ b/client/ayon_core/modules/job_queue/addon.py @@ -1,4 +1,4 @@ -"""Job queue OpenPype module was created for remote execution of commands. +"""Job queue AYON addon was created for remote execution of commands. ## Why is needed Primarily created for hosts which are not easilly controlled from command line @@ -30,7 +30,7 @@ workstations know where to send or receive jobs. ### start_worker - start worker which will process jobs -- has required possitional argument which is application name from OpenPype +- has required possitional argument which is application name from AYON settings e.g. 
'tvpaint/11-5' ('tvpaint' is group '11-5' is variant) - it is possible to specify server url but url from settings is used when not passed (this is added mainly for developing purposes) @@ -41,29 +41,24 @@ import json import copy import platform -from ayon_core.addon import click_wrap -from ayon_core.modules import OpenPypeModule -from ayon_core.settings import get_system_settings +from ayon_core.addon import AYONAddon, click_wrap +from ayon_core.settings import get_studio_settings -class JobQueueModule(OpenPypeModule): +class JobQueueAddon(AYONAddon): name = "job_queue" - def initialize(self, modules_settings): - module_settings = modules_settings.get(self.name) or {} - server_url = module_settings.get("server_url") or "" + def initialize(self, studio_settings): + addon_settings = studio_settings.get(self.name) or {} + server_url = addon_settings.get("server_url") or "" self._server_url = self.url_conversion(server_url) jobs_root_mapping = self._roots_mapping_conversion( - module_settings.get("jobs_root") + addon_settings.get("jobs_root") ) self._jobs_root_mapping = jobs_root_mapping - # Is always enabled - # - the module does nothing until is used - self.enabled = True - @classmethod def _root_conversion(cls, root_path): """Make sure root path does not end with slash.""" @@ -127,8 +122,8 @@ class JobQueueModule(OpenPypeModule): @classmethod def get_jobs_root_from_settings(cls): - module_settings = get_system_settings()["modules"] - jobs_root_mapping = module_settings.get(cls.name, {}).get("jobs_root") + studio_settings = get_studio_settings() + jobs_root_mapping = studio_settings.get(cls.name, {}).get("jobs_root") converted_mapping = cls._roots_mapping_conversion(jobs_root_mapping) return converted_mapping[platform.system().lower()] @@ -157,9 +152,9 @@ class JobQueueModule(OpenPypeModule): @classmethod def get_server_url_from_settings(cls): - module_settings = get_system_settings()["modules"] + studio_settings = get_studio_settings() return cls.url_conversion( - module_settings + studio_settings .get(cls.name, {}) .get("server_url") ) @@ -214,7 +209,7 @@ class JobQueueModule(OpenPypeModule): @click_wrap.group( - JobQueueModule.name, + JobQueueAddon.name, help="Application job server. Can be used as render farm." 
) def cli_main(): @@ -228,7 +223,7 @@ def cli_main(): @click_wrap.option("--port", help="Server port") @click_wrap.option("--host", help="Server host (ip address)") def cli_start_server(port, host): - JobQueueModule.start_server(port, host) + JobQueueAddon.start_server(port, host) @cli_main.command( @@ -241,4 +236,4 @@ def cli_start_server(port, host): "--server_url", help="Server url which handle workers and jobs.") def cli_start_worker(app_name, server_url): - JobQueueModule.start_worker(app_name, server_url) + JobQueueAddon.start_worker(app_name, server_url) diff --git a/client/ayon_core/modules/launcher_action.py b/client/ayon_core/modules/launcher_action.py index c0266e3a57..1faf6ef4b1 100644 --- a/client/ayon_core/modules/launcher_action.py +++ b/client/ayon_core/modules/launcher_action.py @@ -1,19 +1,14 @@ import os from ayon_core import AYON_CORE_ROOT -from ayon_core.modules import ( - OpenPypeModule, - ITrayAction, -) +from ayon_core.addon import AYONAddon, ITrayAction -class LauncherAction(OpenPypeModule, ITrayAction): +class LauncherAction(AYONAddon, ITrayAction): label = "Launcher" name = "launcher_tool" - def initialize(self, _modules_settings): - # This module is always enabled - self.enabled = True + def initialize(self, settings): # Tray attributes self._window = None diff --git a/client/ayon_core/modules/library_loader_action.py b/client/ayon_core/modules/library_loader_action.py deleted file mode 100644 index 524c4f7144..0000000000 --- a/client/ayon_core/modules/library_loader_action.py +++ /dev/null @@ -1,67 +0,0 @@ -from ayon_core.modules import AYONAddon, ITrayModule - - -class LibraryLoaderAddon(AYONAddon, ITrayModule): - name = "library_tool" - - def initialize(self, modules_settings): - # Tray attributes - self._library_loader_imported = None - self._library_loader_window = None - - def tray_init(self): - # Add library tool - self._library_loader_imported = False - try: - from ayon_core.tools.loader.ui import LoaderWindow - - self._library_loader_imported = True - except Exception: - self.log.warning( - "Couldn't load Library loader tool for tray.", - exc_info=True - ) - - # Definition of Tray menu - def tray_menu(self, tray_menu): - if not self._library_loader_imported: - return - - from qtpy import QtWidgets - # Actions - action_library_loader = QtWidgets.QAction( - "Loader", tray_menu - ) - - action_library_loader.triggered.connect(self.show_library_loader) - - tray_menu.addAction(action_library_loader) - - def tray_start(self, *_a, **_kw): - return - - def tray_exit(self, *_a, **_kw): - return - - def show_library_loader(self): - if self._library_loader_window is None: - from ayon_core.pipeline import install_ayon_plugins - - self._init_library_loader() - - install_ayon_plugins() - - self._library_loader_window.show() - - # Raise and activate the window - # for MacOS - self._library_loader_window.raise_() - # for Windows - self._library_loader_window.activateWindow() - - def _init_library_loader(self): - from ayon_core.tools.loader.ui import LoaderWindow - - libraryloader = LoaderWindow() - - self._library_loader_window = libraryloader diff --git a/client/ayon_core/modules/loader_action.py b/client/ayon_core/modules/loader_action.py new file mode 100644 index 0000000000..a0cc417b66 --- /dev/null +++ b/client/ayon_core/modules/loader_action.py @@ -0,0 +1,67 @@ +from ayon_core.addon import AYONAddon, ITrayAddon + + +class LoaderAddon(AYONAddon, ITrayAddon): + name = "loader_tool" + + def initialize(self, settings): + # Tray attributes + self._loader_imported = 
None + self._loader_window = None + + def tray_init(self): + # Add library tool + self._loader_imported = False + try: + from ayon_core.tools.loader.ui import LoaderWindow + + self._loader_imported = True + except Exception: + self.log.warning( + "Couldn't load Loader tool for tray.", + exc_info=True + ) + + # Definition of Tray menu + def tray_menu(self, tray_menu): + if not self._loader_imported: + return + + from qtpy import QtWidgets + # Actions + action_loader = QtWidgets.QAction( + "Loader", tray_menu + ) + + action_loader.triggered.connect(self.show_loader) + + tray_menu.addAction(action_loader) + + def tray_start(self, *_a, **_kw): + return + + def tray_exit(self, *_a, **_kw): + return + + def show_loader(self): + if self._loader_window is None: + from ayon_core.pipeline import install_ayon_plugins + + self._init_loader() + + install_ayon_plugins() + + self._loader_window.show() + + # Raise and activate the window + # for MacOS + self._loader_window.raise_() + # for Windows + self._loader_window.activateWindow() + + def _init_loader(self): + from ayon_core.tools.loader.ui import LoaderWindow + + libraryloader = LoaderWindow() + + self._loader_window = libraryloader diff --git a/client/ayon_core/modules/python_console_interpreter/__init__.py b/client/ayon_core/modules/python_console_interpreter/__init__.py index 5f54ac497b..8d5c23bdba 100644 --- a/client/ayon_core/modules/python_console_interpreter/__init__.py +++ b/client/ayon_core/modules/python_console_interpreter/__init__.py @@ -1,4 +1,4 @@ -from .module import ( +from .addon import ( PythonInterpreterAction ) diff --git a/client/ayon_core/modules/python_console_interpreter/module.py b/client/ayon_core/modules/python_console_interpreter/addon.py similarity index 77% rename from client/ayon_core/modules/python_console_interpreter/module.py rename to client/ayon_core/modules/python_console_interpreter/addon.py index 7819c9cbf3..ffad3ce707 100644 --- a/client/ayon_core/modules/python_console_interpreter/module.py +++ b/client/ayon_core/modules/python_console_interpreter/addon.py @@ -1,13 +1,12 @@ -from ayon_core.modules import OpenPypeModule, ITrayAction +from ayon_core.addon import AYONAddon, ITrayAction -class PythonInterpreterAction(OpenPypeModule, ITrayAction): +class PythonInterpreterAction(AYONAddon, ITrayAction): label = "Console" name = "python_interpreter" admin_action = True - def initialize(self, modules_settings): - self.enabled = True + def initialize(self, settings): self._interpreter_window = None def tray_init(self): @@ -22,7 +21,7 @@ class PythonInterpreterAction(OpenPypeModule, ITrayAction): if self._interpreter_window: return - from openpype_modules.python_console_interpreter.window import ( + from ayon_core.modules.python_console_interpreter.window import ( PythonInterpreterWidget ) diff --git a/client/ayon_core/modules/royalrender/__init__.py b/client/ayon_core/modules/royalrender/__init__.py index cc92e3b50d..121530beda 100644 --- a/client/ayon_core/modules/royalrender/__init__.py +++ b/client/ayon_core/modules/royalrender/__init__.py @@ -1,6 +1,6 @@ -from .royal_render_module import RoyalRenderModule +from .addon import RoyalRenderAddon __all__ = ( - "RoyalRenderModule", + "RoyalRenderAddon", ) diff --git a/client/ayon_core/modules/royalrender/addon.py b/client/ayon_core/modules/royalrender/addon.py new file mode 100644 index 0000000000..e69cf9feec --- /dev/null +++ b/client/ayon_core/modules/royalrender/addon.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +"""Module providing support for Royal Render.""" 
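The launcher, loader, Python console and job_queue hunks above, and the RoyalRender addon that follows, all apply the same migration: `OpenPypeModule.initialize(modules_settings)` becomes `AYONAddon.initialize(studio_settings)`, and a configurable addon marks itself enabled only when its settings are present. Below is a minimal sketch of that pattern, assuming the `ayon_core.addon` base classes behave as these hunks show; the addon name is hypothetical.

```python
# Sketch only: the AYONAddon pattern used by the deadline / royalrender /
# job_queue changes in this diff. "example_addon" is a made-up name.
import os

from ayon_core.addon import AYONAddon, IPluginPaths


class ExampleAddon(AYONAddon, IPluginPaths):
    name = "example_addon"

    def initialize(self, studio_settings):
        # Enabled only when the addon has settings on the AYON server.
        self.enabled = self.name in studio_settings

    def get_plugin_paths(self):
        # Expose publish plugins the same way RoyalRenderAddon does below.
        current_dir = os.path.dirname(os.path.abspath(__file__))
        return {
            "publish": [os.path.join(current_dir, "plugins", "publish")]
        }
```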
+import os + +from ayon_core.addon import AYONAddon, IPluginPaths + + +class RoyalRenderAddon(AYONAddon, IPluginPaths): + """Class providing basic Royal Render implementation logic.""" + name = "royalrender" + + # _rr_api = None + # @property + # def rr_api(self): + # if not self._rr_api: + # # import royal render modules + # from .api import Api + # self._rr_api = Api(self.settings) + # return self._rr_api + + def initialize(self, studio_settings): + # type: (dict) -> None + self.enabled = self.name in studio_settings + + @staticmethod + def get_plugin_paths(): + # type: () -> dict + """Royal Render plugin paths. + + Returns: + dict: Dictionary of plugin paths for RR. + """ + current_dir = os.path.dirname(os.path.abspath(__file__)) + return { + "publish": [os.path.join(current_dir, "plugins", "publish")] + } diff --git a/client/ayon_core/modules/royalrender/lib.py b/client/ayon_core/modules/royalrender/lib.py index d985a39d24..d552e7fb19 100644 --- a/client/ayon_core/modules/royalrender/lib.py +++ b/client/ayon_core/modules/royalrender/lib.py @@ -10,7 +10,12 @@ from datetime import datetime import pyblish.api -from ayon_core.lib import BoolDef, NumberDef, is_running_from_build +from ayon_core.lib import ( + BoolDef, + NumberDef, + is_running_from_build, + is_in_tests, +) from ayon_core.lib.execute import run_ayon_launcher_process from ayon_core.modules.royalrender.api import Api as rrApi from ayon_core.modules.royalrender.rr_job import ( @@ -22,7 +27,6 @@ from ayon_core.modules.royalrender.rr_job import ( from ayon_core.pipeline import AYONPyblishPluginMixin from ayon_core.pipeline.publish import KnownPublishError from ayon_core.pipeline.publish.lib import get_published_workfile_instance -from ayon_core.tests.lib import is_in_tests class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin, @@ -104,9 +108,7 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin, context = instance.context - self._rr_root = self._resolve_rr_path(context, instance.data.get( - "rrPathName")) # noqa - self.log.debug(self._rr_root) + self._rr_root = instance.data.get("rr_root") if not self._rr_root: raise KnownPublishError( ("Missing RoyalRender root. " @@ -206,35 +208,6 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin, """Host specific mapping for RRJob""" raise NotImplementedError - @staticmethod - def _resolve_rr_path(context, rr_path_name): - # type: (pyblish.api.Context, str) -> str - rr_settings = ( - context.data - ["system_settings"] - ["modules"] - ["royalrender"] - ) - try: - default_servers = rr_settings["rr_paths"] - project_servers = ( - context.data - ["project_settings"] - ["royalrender"] - ["rr_paths"] - ) - rr_servers = { - k: default_servers[k] - for k in project_servers - if k in default_servers - } - - except (AttributeError, KeyError): - # Handle situation were we had only one url for royal render. - return context.data["defaultRRPath"][platform.system().lower()] - - return rr_servers[rr_path_name][platform.system().lower()] - def expected_files(self, instance, path, start_frame, end_frame): """Get expected files. 
@@ -346,7 +319,7 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin, add_kwargs = { "project": anatomy_data["project"]["name"], - "asset": instance.context.data["asset"], + "asset": instance.context.data["folderPath"], "task": anatomy_data["task"]["name"], "app": instance.context.data.get("appName"), "envgroup": "farm" @@ -357,8 +330,8 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin, if not all(add_kwargs.values()): raise RuntimeError(( - "Missing required env vars: AVALON_PROJECT, AVALON_ASSET," - " AVALON_TASK, AVALON_APP_NAME" + "Missing required env vars: AYON_PROJECT_NAME, AYON_FOLDER_PATH," + " AYON_TASK_NAME, AYON_APP_NAME" )) for key, value in add_kwargs.items(): diff --git a/client/ayon_core/modules/royalrender/plugins/publish/collect_rr_path_from_instance.py b/client/ayon_core/modules/royalrender/plugins/publish/collect_rr_path_from_instance.py index e978ce5bed..7fad573a8b 100644 --- a/client/ayon_core/modules/royalrender/plugins/publish/collect_rr_path_from_instance.py +++ b/client/ayon_core/modules/royalrender/plugins/publish/collect_rr_path_from_instance.py @@ -1,47 +1,52 @@ # -*- coding: utf-8 -*- +""" +Requires: + instance.context.data["project_settings"] +Provides: + instance.data["rr_root"] (str) - root folder of RoyalRender server +""" +import os.path + import pyblish.api +from ayon_core.modules.royalrender.rr_job import get_rr_platform class CollectRRPathFromInstance(pyblish.api.InstancePlugin): - """Collect RR Path from instance.""" + """Collect RR Path from instance. + + All RoyalRender server roots are set in `Studio Settings`; each project + only stores keys pointing to those roots, which limits typos inside of + Project settings. + Eventually it could be possible to add a dropdown with these keys to the + Creators to allow artists to select which RR server they would like to use. + """ order = pyblish.api.CollectorOrder label = "Collect Royal Render path name from the Instance" families = ["render", "prerender", "renderlayer"] def process(self, instance): - instance.data["rrPathName"] = self._collect_rr_path_name(instance) + instance.data["rr_root"] = self._collect_root(instance) self.log.info( - "Using '{}' for submission.".format(instance.data["rrPathName"])) + "Using '{}' for submission.".format(instance.data["rr_root"])) - @staticmethod - def _collect_rr_path_name(instance): + def _collect_root(self, instance): # type: (pyblish.api.Instance) -> str - """Get Royal Render pat name from render instance.""" - rr_settings = ( - instance.context.data - ["system_settings"] - ["modules"] - ["royalrender"] - ) - if not instance.data.get("rrPaths"): - return "default" - try: - default_servers = rr_settings["rr_paths"] - project_servers = ( - instance.context.data - ["project_settings"] - ["royalrender"] - ["rr_paths"] - ) - rr_servers = { - k: default_servers[k] - for k in project_servers - if k in default_servers - } + """Get Royal Render root path from render instance. + If artists should be able to select a specific RR server, it must be + added to the creator. It is not there yet. + """ + rr_settings = instance.context.data["project_settings"]["royalrender"] + rr_paths = rr_settings["rr_paths"] + selected_keys = rr_settings["selected_rr_paths"] - except (AttributeError, KeyError): - # Handle situation were we had only one url for royal render.
- return rr_settings["rr_paths"]["default"] + platform = get_rr_platform() + key_to_path = { + item["name"]: item["value"][platform] + for item in rr_paths + } - return list(rr_servers.keys())[int(instance.data.get("rrPaths"))] + for selected_key in selected_keys: + rr_root = key_to_path[selected_key] + if os.path.exists(rr_root): + return rr_root diff --git a/client/ayon_core/modules/royalrender/plugins/publish/collect_sequences_from_job.py b/client/ayon_core/modules/royalrender/plugins/publish/collect_sequences_from_job.py index a253a1ec5b..7f7b89590c 100644 --- a/client/ayon_core/modules/royalrender/plugins/publish/collect_sequences_from_job.py +++ b/client/ayon_core/modules/royalrender/plugins/publish/collect_sequences_from_job.py @@ -8,8 +8,6 @@ from pprint import pformat import pyblish.api -from ayon_core.pipeline import legacy_io - def collect(root, regex=None, @@ -132,7 +130,6 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin): session = metadata.get("session") if session: self.log.info("setting session using metadata") - legacy_io.Session.update(session) os.environ.update(session) else: @@ -153,8 +150,8 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin): self.log.info("Found collections: {}".format(collections)) - if data.get("subset") and len(collections) > 1: - self.log.error("Forced subset can only work with a single " + if data.get("productName") and len(collections) > 1: + self.log.error("Forced product name can only work with a single " "found sequence") raise RuntimeError("Invalid sequence") @@ -177,8 +174,10 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin): # Ensure each instance gets a unique reference to the data data = copy.deepcopy(data) - # If no subset provided, get it from collection's head - subset = data.get("subset", collection.head.rstrip("_. ")) + # If no product provided, get it from collection's head + product_name = ( + data.get("productName", collection.head.rstrip("_. 
")) + ) # If no start or end frame provided, get it from collection indices = list(collection.indexes) @@ -189,11 +188,12 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin): instance.data.update({ "name": str(collection), - "family": families[0], # backwards compatibility / pyblish + "productType": families[0], + "family": families[0], "families": list(families), - "subset": subset, - "asset": data.get( - "asset", context.data["asset"] + "productName": product_name, + "folderPath": data.get( + "folderPath", context.data["folderPath"] ), "stagingDir": root, "frameStart": start, diff --git a/client/ayon_core/modules/royalrender/plugins/publish/create_nuke_royalrender_job.py b/client/ayon_core/modules/royalrender/plugins/publish/create_nuke_royalrender_job.py index 4234535b23..9a3bf3624b 100644 --- a/client/ayon_core/modules/royalrender/plugins/publish/create_nuke_royalrender_job.py +++ b/client/ayon_core/modules/royalrender/plugins/publish/create_nuke_royalrender_job.py @@ -15,10 +15,12 @@ class CreateNukeRoyalRenderJob(lib.BaseCreateRoyalRenderJob): super(CreateNukeRoyalRenderJob, self).process(instance) # redefinition of families - if "render" in instance.data["family"]: + if "render" in instance.data["productType"]: + instance.data["productType"] = "write" instance.data["family"] = "write" instance.data["families"].insert(0, "render2d") - elif "prerender" in instance.data["family"]: + elif "prerender" in instance.data["productType"]: + instance.data["productType"] = "write" instance.data["family"] = "write" instance.data["families"].insert(0, "prerender") diff --git a/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py b/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py index 680795a329..5d177fec07 100644 --- a/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py +++ b/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py @@ -13,13 +13,10 @@ from ayon_core.modules.royalrender.rr_job import ( get_rr_platform ) from ayon_core.pipeline.publish import KnownPublishError -from ayon_core.pipeline import ( - legacy_io, -) from ayon_core.pipeline.farm.pyblish_functions import ( create_skeleton_instance, create_instances_for_aov, - attach_instances_to_subset, + attach_instances_to_product, prepare_representations, create_metadata_path ) @@ -66,7 +63,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin, "FTRACK_API_USER", "FTRACK_API_KEY", "FTRACK_SERVER", - "AVALON_APP_NAME", + "AYON_APP_NAME", "AYON_USERNAME", "OPENPYPE_SG_USER", ] @@ -116,9 +113,9 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin, instance_skeleton_data["representations"] += representations instances = [instance_skeleton_data] - # attach instances to subset + # attach instances to product if instance.data.get("attachTo"): - instances = attach_instances_to_subset( + instances = attach_instances_to_product( instance.data.get("attachTo"), instances ) @@ -135,7 +132,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin, # publish job file publish_job = { - "asset": instance_skeleton_data["asset"], + "folderPath": instance_skeleton_data["folderPath"], "frameStart": instance_skeleton_data["frameStart"], "frameEnd": instance_skeleton_data["frameEnd"], "fps": instance_skeleton_data["fps"], @@ -145,7 +142,6 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin, "intent": instance.context.data.get("intent"), "comment": 
instance.context.data.get("comment"), "job": attr.asdict(rr_job), - "session": legacy_io.Session.copy(), "instances": instances } @@ -172,8 +168,8 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin, """ data = instance.data.copy() - subset = data["subset"] - jobname = "Publish - {subset}".format(subset=subset) + product_name = data["productName"] + jobname = "Publish - {}".format(product_name) # Transfer the environment from the original job to this dependent # job, so they use the same environment @@ -183,9 +179,9 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin, anatomy_data = instance.context.data["anatomyData"] environment = RREnvList({ - "AVALON_PROJECT": anatomy_data["project"]["name"], - "AVALON_ASSET": instance.context.data["asset"], - "AVALON_TASK": anatomy_data["task"]["name"], + "AYON_PROJECT_NAME": anatomy_data["project"]["name"], + "AYON_FOLDER_PATH": instance.context.data["folderPath"], + "AYON_TASK_NAME": anatomy_data["task"]["name"], "AYON_USERNAME": anatomy_data["user"] }) @@ -220,7 +216,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin, SeqEnd=1, SeqStep=1, SeqFileOffset=0, - Version=self._sanitize_version(os.environ.get("OPENPYPE_VERSION")), + Version=os.environ["AYON_BUNDLE_NAME"], SceneName=abs_metadata_path, # command line arguments CustomAddCmdFlags=" ".join(args), @@ -247,26 +243,3 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin, job.WaitForPreIDs += jobs_pre_ids return job - - def _sanitize_version(self, version): - """Returns version in format MAJOR.MINORPATCH - - 3.15.7-nightly.2 >> 3.157 - """ - VERSION_REGEX = re.compile( - r"(?P0|[1-9]\d*)" - r"\.(?P0|[1-9]\d*)" - r"\.(?P0|[1-9]\d*)" - r"(?:-(?P[a-zA-Z\d\-.]*))?" - r"(?:\+(?P[a-zA-Z\d\-.]*))?" - ) - - valid_parts = VERSION_REGEX.findall(version) - if len(valid_parts) != 1: - # Return invalid version with filled 'origin' attribute - return version - - # Unpack found version - major, minor, patch, pre, post = valid_parts[0] - - return "{}.{}{}".format(major, minor, patch) diff --git a/client/ayon_core/modules/royalrender/plugins/publish/submit_jobs_to_royalrender.py b/client/ayon_core/modules/royalrender/plugins/publish/submit_jobs_to_royalrender.py index a76bdfc26c..54de943428 100644 --- a/client/ayon_core/modules/royalrender/plugins/publish/submit_jobs_to_royalrender.py +++ b/client/ayon_core/modules/royalrender/plugins/publish/submit_jobs_to_royalrender.py @@ -25,16 +25,6 @@ class SubmitJobsToRoyalRender(pyblish.api.ContextPlugin): self._submission_parameters = [] def process(self, context): - rr_settings = ( - context.data - ["system_settings"] - ["modules"] - ["royalrender"] - ) - - if rr_settings["enabled"] is not True: - self.log.warning("RoyalRender modules is disabled.") - return # iterate over all instances and try to find RRJobs jobs = [] @@ -47,11 +37,11 @@ class SubmitJobsToRoyalRender(pyblish.api.ContextPlugin): isinstance(job, RRJob) for job in instance.data.get("rrJobs")): jobs += instance.data.get("rrJobs") - if instance.data.get("rrPathName"): - instance_rr_path = instance.data["rrPathName"] + if instance.data.get("rr_root"): + instance_rr_path = instance.data["rr_root"] if jobs: - self._rr_root = self._resolve_rr_path(context, instance_rr_path) + self._rr_root = instance_rr_path if not self._rr_root: raise KnownPublishError( ("Missing RoyalRender root. 
" @@ -100,32 +90,3 @@ class SubmitJobsToRoyalRender(pyblish.api.ContextPlugin): def get_submission_parameters(self): return [SubmitterParameter("RequiredMemory", "0")] - - @staticmethod - def _resolve_rr_path(context, rr_path_name): - # type: (pyblish.api.Context, str) -> str - rr_settings = ( - context.data - ["system_settings"] - ["modules"] - ["royalrender"] - ) - try: - default_servers = rr_settings["rr_paths"] - project_servers = ( - context.data - ["project_settings"] - ["royalrender"] - ["rr_paths"] - ) - rr_servers = { - k: default_servers[k] - for k in project_servers - if k in default_servers - } - - except (AttributeError, KeyError): - # Handle situation were we had only one url for royal render. - return context.data["defaultRRPath"][platform.system().lower()] - - return rr_servers[rr_path_name][platform.system().lower()] diff --git a/client/ayon_core/modules/royalrender/royal_render_module.py b/client/ayon_core/modules/royalrender/royal_render_module.py deleted file mode 100644 index 66b09832d8..0000000000 --- a/client/ayon_core/modules/royalrender/royal_render_module.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -"""Module providing support for Royal Render.""" -import os -import ayon_core.modules -from ayon_core.modules import OpenPypeModule, IPluginPaths - - -class RoyalRenderModule(OpenPypeModule, IPluginPaths): - """Class providing basic Royal Render implementation logic.""" - name = "royalrender" - - @property - def api(self): - if not self._api: - # import royal render modules - from . import api as rr_api - self._api = rr_api.Api(self.settings) - - return self._api - - def __init__(self, manager, settings): - # type: (ayon_core.addon.AddonsManager, dict) -> None - self.rr_paths = {} - self._api = None - self.settings = settings - super(RoyalRenderModule, self).__init__(manager, settings) - - def initialize(self, module_settings): - # type: (dict) -> None - rr_settings = module_settings[self.name] - self.enabled = rr_settings["enabled"] - self.rr_paths = rr_settings.get("rr_paths") - - @staticmethod - def get_plugin_paths(): - # type: () -> dict - """Royal Render plugin paths. - - Returns: - dict: Dictionary of plugin paths for RR. 
- """ - current_dir = os.path.dirname(os.path.abspath(__file__)) - return { - "publish": [os.path.join(current_dir, "plugins", "publish")] - } diff --git a/client/ayon_core/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py b/client/ayon_core/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py index 7118c5ebef..778052778f 100644 --- a/client/ayon_core/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py +++ b/client/ayon_core/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py @@ -136,10 +136,10 @@ class OpenPypeContextSelector: def run_publish(self): """Run publish process.""" - env = {"AVALON_PROJECT": str(self.context.get("project")), - "AVALON_ASSET": str(self.context.get("asset")), - "AVALON_TASK": str(self.context.get("task")), - # "AVALON_APP_NAME": str(self.context.get("app_name")) + env = {"AYON_PROJECT_NAME": str(self.context.get("project")), + "AYON_FOLDER_PATH": str(self.context.get("asset")), + "AYON_TASK_NAME": str(self.context.get("task")), + # "AYON_APP_NAME": str(self.context.get("app_name")) } print(">>> setting environment:") @@ -182,10 +182,18 @@ print("running selector") selector = OpenPypeContextSelector() # try to set context from environment -selector.context["project"] = os.getenv("AVALON_PROJECT") -selector.context["asset"] = os.getenv("AVALON_ASSET") -selector.context["task"] = os.getenv("AVALON_TASK") -# selector.context["app_name"] = os.getenv("AVALON_APP_NAME") +for key, env_keys in ( + ("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]), + ("asset", ["AYON_FOLDER_PATH", "AVALON_ASSET"]), + ("task", ["AYON_TASK_NAME", "AVALON_TASK"]), + # ("app_name", ["AYON_APP_NAME", "AVALON_APP_NAME"]) +): + value = "" + for env_key in env_keys: + value = os.getenv(env_key) + if value: + break + selector.context[key] = value # if anything inside is None, scratch the whole thing and # ask user for context. 
diff --git a/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py b/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py index d8710a9b26..4e94603aa9 100644 --- a/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py +++ b/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py @@ -11,14 +11,14 @@ class PostStartTimerHook(PostLaunchHook): def execute(self): project_name = self.data.get("project_name") - asset_name = self.data.get("asset_name") + asset_name = self.data.get("folder_path") task_name = self.data.get("task_name") missing_context_keys = set() if not project_name: missing_context_keys.add("project_name") if not asset_name: - missing_context_keys.add("asset_name") + missing_context_keys.add("folder_path") if not task_name: missing_context_keys.add("task_name") diff --git a/client/ayon_core/modules/timers_manager/plugins/publish/start_timer.py b/client/ayon_core/modules/timers_manager/plugins/publish/start_timer.py index 51f707ecf6..182efbc4ae 100644 --- a/client/ayon_core/modules/timers_manager/plugins/publish/start_timer.py +++ b/client/ayon_core/modules/timers_manager/plugins/publish/start_timer.py @@ -1,6 +1,6 @@ """ Requires: - context -> system_settings + context -> project_settings context -> ayonAddonsManager """ @@ -18,13 +18,13 @@ class StartTimer(pyblish.api.ContextPlugin): self.log.debug("TimersManager is disabled") return - modules_settings = context.data["system_settings"]["modules"] - if not modules_settings["timers_manager"]["disregard_publishing"]: + project_settings = context.data["project_settings"] + if not project_settings["timers_manager"]["disregard_publishing"]: self.log.debug("Publish is not affecting running timers.") return project_name = context.data["projectName"] - asset_name = context.data.get("asset") + asset_name = context.data.get("folderPath") task_name = context.data.get("task") if not project_name or not asset_name or not task_name: self.log.info(( diff --git a/client/ayon_core/modules/timers_manager/plugins/publish/stop_timer.py b/client/ayon_core/modules/timers_manager/plugins/publish/stop_timer.py index 9d7cb33ba9..eafd8cb450 100644 --- a/client/ayon_core/modules/timers_manager/plugins/publish/stop_timer.py +++ b/client/ayon_core/modules/timers_manager/plugins/publish/stop_timer.py @@ -1,6 +1,6 @@ """ Requires: - context -> system_settings + context -> project_settings context -> ayonAddonsManager """ @@ -19,8 +19,8 @@ class StopTimer(pyblish.api.ContextPlugin): self.log.debug("TimersManager is disabled") return - modules_settings = context.data["system_settings"]["modules"] - if not modules_settings["timers_manager"]["disregard_publishing"]: + project_settings = context.data["project_settings"] + if not project_settings["timers_manager"]["disregard_publishing"]: self.log.debug("Publish is not affecting running timers.") return diff --git a/client/ayon_core/modules/timers_manager/rest_api.py b/client/ayon_core/modules/timers_manager/rest_api.py index b460719f80..c890d587de 100644 --- a/client/ayon_core/modules/timers_manager/rest_api.py +++ b/client/ayon_core/modules/timers_manager/rest_api.py @@ -45,12 +45,12 @@ class TimersManagerModuleRestApi: data = await request.json() try: project_name = data["project_name"] - asset_name = data["asset_name"] + asset_name = data["folder_path"] task_name = data["task_name"] except KeyError: msg = ( "Payload must contain fields 'project_name," - " 'asset_name' and 'task_name'" + " 'folder_path' and 'task_name'" ) 
self.log.error(msg) return Response(status=400, message=msg) @@ -71,11 +71,11 @@ class TimersManagerModuleRestApi: data = await request.json() try: project_name = data['project_name'] - asset_name = data['asset_name'] + asset_name = data['folder_path'] task_name = data['task_name'] except KeyError: message = ( - "Payload must contain fields 'project_name, 'asset_name'," + "Payload must contain fields 'project_name, 'folder_path'," " 'task_name'" ) self.log.warning(message) diff --git a/client/ayon_core/modules/timers_manager/timers_manager.py b/client/ayon_core/modules/timers_manager/timers_manager.py index daba0cead9..e04200525a 100644 --- a/client/ayon_core/modules/timers_manager/timers_manager.py +++ b/client/ayon_core/modules/timers_manager/timers_manager.py @@ -3,8 +3,8 @@ import platform from ayon_core.client import get_asset_by_name -from ayon_core.modules import ( - OpenPypeModule, +from ayon_core.addon import ( + AYONAddon, ITrayService, IPluginPaths ) @@ -76,7 +76,7 @@ class ExampleTimersManagerConnector: class TimersManager( - OpenPypeModule, + AYONAddon, ITrayService, IPluginPaths ): @@ -99,23 +99,27 @@ class TimersManager( "start_timer" ) - def initialize(self, modules_settings): - timers_settings = modules_settings[self.name] + def initialize(self, studio_settings): + timers_settings = studio_settings.get(self.name) + enabled = timers_settings is not None - self.enabled = timers_settings["enabled"] + auto_stop = False + full_time = 0 + message_time = 0 + if enabled: + # When timer will stop if idle manager is running (minutes) + full_time = int(timers_settings["full_time"] * 60) + # How many minutes before the timer is stopped will popup the message + message_time = int(timers_settings["message_time"] * 60) - # When timer will stop if idle manager is running (minutes) - full_time = int(timers_settings["full_time"] * 60) - # How many minutes before the timer is stopped will popup the message - message_time = int(timers_settings["message_time"] * 60) - - auto_stop = timers_settings["auto_stop"] - platform_name = platform.system().lower() - # Turn of auto stop on MacOs because pynput requires root permissions - # and on linux can cause thread locks on application close - if full_time <= 0 or platform_name in ("darwin", "linux"): - auto_stop = False + auto_stop = timers_settings["auto_stop"] + platform_name = platform.system().lower() + # Turn of auto stop on MacOs because pynput requires root permissions + # and on linux can cause thread locks on application close + if full_time <= 0 or platform_name in ("darwin", "linux"): + auto_stop = False + self.enabled = enabled self.auto_stop = auto_stop self.time_show_message = full_time - message_time self.time_stop_timer = full_time @@ -426,7 +430,7 @@ class TimersManager( return data = { "project_name": project_name, - "asset_name": asset_name, + "folder_path": asset_name, "task_name": task_name } @@ -468,7 +472,7 @@ class TimersManager( def _on_host_task_change(self, event): project_name = event["project_name"] - asset_name = event["asset_name"] + asset_name = event["folder_path"] task_name = event["task_name"] self.log.debug(( "Sending message that timer should change to" diff --git a/client/ayon_core/modules/timers_manager/widget_user_idle.py b/client/ayon_core/modules/timers_manager/widget_user_idle.py index 94d7a606ed..c59ab15b38 100644 --- a/client/ayon_core/modules/timers_manager/widget_user_idle.py +++ b/client/ayon_core/modules/timers_manager/widget_user_idle.py @@ -9,7 +9,7 @@ class WidgetUserIdle(QtWidgets.QWidget): def 
__init__(self, module): super(WidgetUserIdle, self).__init__() - self.setWindowTitle("OpenPype - Stop timers") + self.setWindowTitle("AYON - Stop timers") icon = QtGui.QIcon(resources.get_ayon_icon_filepath()) self.setWindowIcon(icon) diff --git a/client/ayon_core/modules/webserver/webserver_module.py b/client/ayon_core/modules/webserver/webserver_module.py index ec143d0866..c324e0dd18 100644 --- a/client/ayon_core/modules/webserver/webserver_module.py +++ b/client/ayon_core/modules/webserver/webserver_module.py @@ -1,6 +1,6 @@ """WebServerAddon spawns aiohttp server in asyncio loop. -Main usage of the module is in OpenPype tray where make sense to add ability +Main usage of the module is in AYON tray where make sense to add ability of other modules to add theirs routes. Module which would want use that option must have implemented method `webserver_initialization` which must expect `WebServerManager` object where is possible to add routes or paths diff --git a/client/ayon_core/pipeline/__init__.py b/client/ayon_core/pipeline/__init__.py index c5507b0a7b..679e9a195e 100644 --- a/client/ayon_core/pipeline/__init__.py +++ b/client/ayon_core/pipeline/__init__.py @@ -1,6 +1,8 @@ from .constants import ( AVALON_CONTAINER_ID, + AVALON_INSTANCE_ID, AYON_CONTAINER_ID, + AYON_INSTANCE_ID, HOST_WORKFILE_EXTENSIONS, ) @@ -101,7 +103,9 @@ uninstall = uninstall_host __all__ = ( "AVALON_CONTAINER_ID", + "AVALON_INSTANCE_ID", "AYON_CONTAINER_ID", + "AYON_INSTANCE_ID", "HOST_WORKFILE_EXTENSIONS", # --- Anatomy --- diff --git a/client/ayon_core/pipeline/actions.py b/client/ayon_core/pipeline/actions.py index 1701498d10..8e0ce7e583 100644 --- a/client/ayon_core/pipeline/actions.py +++ b/client/ayon_core/pipeline/actions.py @@ -26,7 +26,7 @@ class LauncherAction(object): Args: session (dict[str, Union[str, None]]): Session data with - AVALON_PROJECT, AVALON_ASSET and AVALON_TASK. + AYON_PROJECT_NAME, AYON_FOLDER_PATH and AYON_TASK_NAME. """ return True diff --git a/client/ayon_core/pipeline/anatomy.py b/client/ayon_core/pipeline/anatomy.py index 86b7d92309..e7833a9a15 100644 --- a/client/ayon_core/pipeline/anatomy.py +++ b/client/ayon_core/pipeline/anatomy.py @@ -8,9 +8,6 @@ import numbers import six import time -from ayon_core.settings.lib import ( - get_local_settings, -) from ayon_core.client import get_project, get_ayon_server_api_connection from ayon_core.lib import Logger, get_local_site_id from ayon_core.lib.path_templates import ( @@ -423,7 +420,7 @@ class Anatomy(BaseAnatomy): def __init__(self, project_name=None, site_name=None): if not project_name: - project_name = os.environ.get("AVALON_PROJECT") + project_name = os.environ.get("AYON_PROJECT_NAME") if not project_name: raise ProjectNotSet(( @@ -453,7 +450,7 @@ class Anatomy(BaseAnatomy): return cls._sync_server_addon_cache.data @classmethod - def _get_studio_roots_overrides(cls, project_name, local_settings=None): + def _get_studio_roots_overrides(cls, project_name): """This would return 'studio' site override by local settings. Notes: @@ -465,7 +462,6 @@ class Anatomy(BaseAnatomy): Args: project_name (str): Name of project. - local_settings (Optional[dict[str, Any]]): Prepared local settings. Returns: Union[Dict[str, str], None]): Local root overrides. @@ -488,11 +484,6 @@ class Anatomy(BaseAnatomy): should be returned. 
""" - # Local settings may be used more than once or may not be used at all - # - to avoid slowdowns 'get_local_settings' is not called until it's - # really needed - local_settings = None - # First check if sync server is available and enabled sync_server = cls.get_sync_server_addon() if sync_server is None or not sync_server.enabled: @@ -503,11 +494,8 @@ class Anatomy(BaseAnatomy): # Use sync server to receive active site name project_cache = cls._default_site_id_cache[project_name] if project_cache.is_outdated: - local_settings = get_local_settings() project_cache.update_data( - sync_server.get_active_site_type( - project_name, local_settings - ) + sync_server.get_active_site_type(project_name) ) site_name = project_cache.data @@ -517,12 +505,12 @@ class Anatomy(BaseAnatomy): # Handle studio root overrides without sync server # - studio root overrides can be done even without sync server roots_overrides = cls._get_studio_roots_overrides( - project_name, local_settings + project_name ) else: # Ask sync server to get roots overrides roots_overrides = sync_server.get_site_root_overrides( - project_name, site_name, local_settings + project_name, site_name ) site_cache.update_data(roots_overrides) return site_cache.data diff --git a/client/ayon_core/pipeline/colorspace.py b/client/ayon_core/pipeline/colorspace.py index d77f301498..7100984217 100644 --- a/client/ayon_core/pipeline/colorspace.py +++ b/client/ayon_core/pipeline/colorspace.py @@ -254,7 +254,7 @@ def get_imageio_file_rules_colorspace_from_filepath( # match file rule from path colorspace_name = None - for file_rule in file_rules.values(): + for file_rule in file_rules: pattern = file_rule["pattern"] extension = file_rule["ext"] ext_match = re.match( @@ -281,7 +281,7 @@ def get_config_file_rules_colorspace_from_filepath(config_path, filepath): filepath (str): path leading to a file Returns: - Any[str, None]: matching colorspace name + Union[str, None]: matching colorspace name """ if not compatibility_check(): # python environment is not compatible with PyOpenColorIO @@ -918,28 +918,13 @@ def get_imageio_file_rules(project_name, host_name, project_settings=None): Defaults to None. Returns: - dict: file rules data + list[dict[str, Any]]: file rules data """ project_settings = project_settings or get_project_settings(project_name) imageio_global, imageio_host = _get_imageio_settings( project_settings, host_name) - # get file rules from global and host_name - frules_global = imageio_global["file_rules"] - activate_global_rules = ( - frules_global.get("activate_global_file_rules", False) - # TODO: remove this in future - backward compatibility - or frules_global.get("enabled") - ) - global_rules = frules_global["rules"] - - if not activate_global_rules: - log.info( - "Colorspace global file rules are disabled." 
- ) - global_rules = {} - # host is optional, some might not have any settings frules_host = imageio_host.get("file_rules", {}) @@ -949,8 +934,24 @@ def get_imageio_file_rules(project_name, host_name, project_settings=None): # TODO: remove this in future - backward compatibility activate_host_rules = frules_host.get("enabled", False) - # return host rules if activated or global rules - return frules_host["rules"] if activate_host_rules else global_rules + if activate_host_rules: + return frules_host["rules"] + + # get file rules from global and host_name + frules_global = imageio_global["file_rules"] + activate_global_rules = ( + frules_global.get("activate_global_file_rules", False) + # TODO: remove this in future - backward compatibility + or frules_global.get("enabled") + ) + + if not activate_global_rules: + log.info( + "Colorspace global file rules are disabled." + ) + return [] + + return frules_global["rules"] def get_remapped_colorspace_to_native( @@ -1017,7 +1018,7 @@ def _get_imageio_settings(project_settings, host_name): tuple[dict, dict]: image io settings for global and host """ # get image io from global and host_name - imageio_global = project_settings["global"]["imageio"] + imageio_global = project_settings["core"]["imageio"] # host is optional, some might not have any settings imageio_host = project_settings.get(host_name, {}).get("imageio", {}) diff --git a/client/ayon_core/pipeline/constants.py b/client/ayon_core/pipeline/constants.py index 755a5fb380..7a08cbb3aa 100644 --- a/client/ayon_core/pipeline/constants.py +++ b/client/ayon_core/pipeline/constants.py @@ -1,5 +1,9 @@ # Metadata ID of loaded container into scene -AVALON_CONTAINER_ID = AYON_CONTAINER_ID = "pyblish.avalon.container" +AYON_CONTAINER_ID = "ayon.load.container" +AYON_INSTANCE_ID = "ayon.create.instance" +# Backwards compatibility +AVALON_CONTAINER_ID = "pyblish.avalon.container" +AVALON_INSTANCE_ID = "pyblish.avalon.instance" # TODO get extensions from host implementations HOST_WORKFILE_EXTENSIONS = { diff --git a/client/ayon_core/pipeline/context_tools.py b/client/ayon_core/pipeline/context_tools.py index 197b1eb6e6..86b3d770b4 100644 --- a/client/ayon_core/pipeline/context_tools.py +++ b/client/ayon_core/pipeline/context_tools.py @@ -1,7 +1,6 @@ """Core pipeline functionality""" import os -import json import types import logging import platform @@ -20,20 +19,20 @@ from ayon_core.client import ( get_asset_name_identifier, get_ayon_server_api_connection, ) +from ayon_core.lib import is_in_tests from ayon_core.lib.events import emit_event from ayon_core.addon import load_addons, AddonsManager from ayon_core.settings import get_project_settings -from ayon_core.tests.lib import is_in_tests from .publish.lib import filter_pyblish_plugins from .anatomy import Anatomy from .template_data import get_template_data_with_names from .workfile import ( + get_workdir, get_workfile_template_key, get_custom_workfile_template_by_string_context, ) from . 
import ( - legacy_io, register_loader_plugin_path, register_inventory_action_path, register_creator_plugin_path, @@ -116,22 +115,17 @@ def install_host(host): # Make sure global AYON connection has set site id and version get_ayon_server_api_connection() - legacy_io.install() addons_manager = _get_addons_manager() - missing = list() - for key in ("AVALON_PROJECT", "AVALON_ASSET"): - if key not in legacy_io.Session: - missing.append(key) + project_name = os.getenv("AYON_PROJECT_NAME") + # WARNING: This might be an issue + # - commented out because 'traypublisher' does not have set project + # if not project_name: + # raise ValueError( + # "AYON_PROJECT_NAME is missing in environment variables." + # ) - assert not missing, ( - "%s missing from environment, %s" % ( - ", ".join(missing), - json.dumps(legacy_io.Session, indent=4, sort_keys=True) - )) - - project_name = legacy_io.Session["AVALON_PROJECT"] - log.info("Activating %s.." % project_name) + log.info("Activating {}..".format(project_name)) # Optional host install function if hasattr(host, "install"): @@ -158,14 +152,13 @@ def install_host(host): print("Registering pyblish target: automated") pyblish.api.register_target("automated") - project_name = os.environ.get("AVALON_PROJECT") - host_name = os.environ.get("AVALON_APP") + host_name = os.environ.get("AYON_HOST_NAME") # Give option to handle host installation for addon in addons_manager.get_enabled_addons(): addon.on_host_install(host, host_name, project_name) - install_openpype_plugins(project_name, host_name) + install_ayon_plugins(project_name, host_name) def install_ayon_plugins(project_name=None, host_name=None): @@ -179,7 +172,7 @@ def install_ayon_plugins(project_name=None, host_name=None): register_inventory_action_path(INVENTORY_PATH) if host_name is None: - host_name = os.environ.get("AVALON_APP") + host_name = os.environ.get("AYON_HOST_NAME") addons_manager = _get_addons_manager() publish_plugin_dirs = addons_manager.collect_publish_plugin_paths( @@ -203,7 +196,7 @@ def install_ayon_plugins(project_name=None, host_name=None): register_inventory_action_path(path) if project_name is None: - project_name = os.environ.get("AVALON_PROJECT") + project_name = os.environ.get("AYON_PROJECT_NAME") # Register studio specific plugins if project_name: @@ -215,8 +208,8 @@ def install_ayon_plugins(project_name=None, host_name=None): platform_name = platform.system().lower() project_plugins = ( project_settings - .get("global", {}) - .get("project_plugins", {}) + ["core"] + ["project_plugins"] .get(platform_name) ) or [] for path in project_plugins: @@ -256,8 +249,6 @@ def uninstall_host(): deregister_host() - legacy_io.uninstall() - log.info("Successfully uninstalled Avalon!") @@ -340,7 +331,7 @@ def get_current_host_name(): """Current host name. Function is based on currently registered host integration or environment - variable 'AVALON_APP'. + variable 'AYON_HOST_NAME'. Returns: Union[str, None]: Name of host integration in current process or None. 
@@ -349,7 +340,7 @@ def get_current_host_name(): host = registered_host() if isinstance(host, HostBase): return host.name - return os.environ.get("AVALON_APP") + return os.environ.get("AYON_HOST_NAME") def get_global_context(): @@ -364,7 +355,7 @@ def get_global_context(): Example: { "project_name": "Commercial", - "asset_name": "Bunny", + "folder_path": "Bunny", "task_name": "Animation", } @@ -374,9 +365,9 @@ def get_global_context(): """ return { - "project_name": os.environ.get("AVALON_PROJECT"), - "asset_name": os.environ.get("AVALON_ASSET"), - "task_name": os.environ.get("AVALON_TASK"), + "project_name": os.environ.get("AYON_PROJECT_NAME"), + "folder_path": os.environ.get("AYON_FOLDER_PATH"), + "task_name": os.environ.get("AYON_TASK_NAME"), } @@ -398,7 +389,7 @@ def get_current_asset_name(): host = registered_host() if isinstance(host, HostBase): return host.get_current_asset_name() - return get_global_context()["asset_name"] + return get_global_context()["folder_path"] def get_current_task_name(): @@ -469,37 +460,42 @@ def is_representation_from_latest(representation): return version_is_latest(project_name, representation["parent"]) -def get_template_data_from_session(session=None, system_settings=None): +def get_template_data_from_session(session=None, settings=None): """Template data for template fill from session keys. Args: session (Union[Dict[str, str], None]): The Session to use. If not provided use the currently active global Session. - system_settings (Union[Dict[str, Any], Any]): Prepared system settings. - Optional are auto received if not passed. + settings (Optional[Dict[str, Any]]): Prepared studio or project + settings. Returns: Dict[str, Any]: All available data from session. """ - if session is None: - session = legacy_io.Session - - project_name = session["AVALON_PROJECT"] - asset_name = session["AVALON_ASSET"] - task_name = session["AVALON_TASK"] - host_name = session["AVALON_APP"] + if session is not None: + project_name = session["AYON_PROJECT_NAME"] + asset_name = session["AYON_FOLDER_PATH"] + task_name = session["AYON_TASK_NAME"] + host_name = session["AYON_HOST_NAME"] + else: + context = get_current_context() + project_name = context["project_name"] + asset_name = context["folder_path"] + task_name = context["task_name"] + host_name = get_current_host_name() return get_template_data_with_names( - project_name, asset_name, task_name, host_name, system_settings + project_name, asset_name, task_name, host_name, settings ) -def get_current_context_template_data(system_settings=None): +def get_current_context_template_data(settings=None): """Prepare template data for current context. Args: - system_settings (Optional[Dict[str, Any]]): Prepared system settings. + settings (Optional[Dict[str, Any]]): Prepared studio or + project settings. Returns: Dict[str, Any] Template data for current context. @@ -507,12 +503,12 @@ def get_current_context_template_data(system_settings=None): context = get_current_context() project_name = context["project_name"] - asset_name = context["asset_name"] + asset_name = context["folder_path"] task_name = context["task_name"] host_name = get_current_host_name() return get_template_data_with_names( - project_name, asset_name, task_name, host_name, system_settings + project_name, asset_name, task_name, host_name, settings ) @@ -529,10 +525,12 @@ def get_workdir_from_session(session=None, template_key=None): str: Workdir path. 
""" - if session is None: - session = legacy_io.Session - project_name = session["AVALON_PROJECT"] - host_name = session["AVALON_APP"] + if session is not None: + project_name = session["AYON_PROJECT_NAME"] + host_name = session["AYON_HOST_NAME"] + else: + project_name = get_current_project_name() + host_name = get_current_host_name() template_data = get_template_data_from_session(session) if not template_key: @@ -556,86 +554,39 @@ def get_custom_workfile_template_from_session( ): """Filter and fill workfile template profiles by current context. - Current context is defined by `legacy_io.Session`. That's why this - function should be used only inside host where context is set and stable. + This function cab be used only inside host where context is set. Args: - session (Union[None, Dict[str, str]]): Session from which are taken + session (Optional[Dict[str, str]]): Session from which are taken data. - project_settings(Dict[str, Any]): Template profiles from settings. + project_settings(Optional[Dict[str, Any]]): Project settings. Returns: str: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.) """ - if session is None: - session = legacy_io.Session + if session is not None: + project_name = session["AYON_PROJECT_NAME"] + asset_name = session["AYON_FOLDER_PATH"] + task_name = session["AYON_TASK_NAME"] + host_name = session["AYON_HOST_NAME"] + else: + context = get_current_context() + project_name = context["project_name"] + asset_name = context["folder_path"] + task_name = context["task_name"] + host_name = get_current_host_name() return get_custom_workfile_template_by_string_context( - session["AVALON_PROJECT"], - session["AVALON_ASSET"], - session["AVALON_TASK"], - session["AVALON_APP"], + project_name, + asset_name, + task_name, + host_name, project_settings=project_settings ) -def compute_session_changes( - session, asset_doc, task_name, template_key=None -): - """Compute the changes for a session object on task under asset. - - Function does not change the session object, only returns changes. - - Args: - session (Dict[str, str]): The initial session to compute changes to. - This is required for computing the full Work Directory, as that - also depends on the values that haven't changed. - asset_doc (Dict[str, Any]): Asset document to switch to. - task_name (str): Name of task to switch to. - template_key (Union[str, None]): Prepare workfile template key in - anatomy templates. - - Returns: - Dict[str, str]: Changes in the Session dictionary. - """ - - # Get asset document and asset - if not asset_doc: - task_name = None - asset_name = None - else: - asset_name = get_asset_name_identifier(asset_doc) - - # Detect any changes compared session - mapping = { - "AVALON_ASSET": asset_name, - "AVALON_TASK": task_name, - } - changes = { - key: value - for key, value in mapping.items() - if value != session.get(key) - } - if not changes: - return changes - - # Compute work directory (with the temporary changed session so far) - changed_session = session.copy() - changed_session.update(changes) - - workdir = None - if asset_doc: - workdir = get_workdir_from_session( - changed_session, template_key - ) - - changes["AVALON_WORKDIR"] = workdir - - return changes - - def change_current_context(asset_doc, task_name, template_key=None): """Update active Session to a new task work area. @@ -651,32 +602,47 @@ def change_current_context(asset_doc, task_name, template_key=None): Dict[str, str]: The changed key, values in the current Session. 
""" - changes = compute_session_changes( - legacy_io.Session, - asset_doc, - task_name, - template_key=template_key - ) + project_name = get_current_project_name() + workdir = None + if asset_doc: + project_doc = get_project(project_name) + host_name = get_current_host_name() + workdir = get_workdir( + project_doc, + asset_doc, + task_name, + host_name, + template_key=template_key + ) + + folder_path = get_asset_name_identifier(asset_doc) + envs = { + "AYON_PROJECT_NAME": project_name, + "AYON_FOLDER_PATH": folder_path, + "AYON_TASK_NAME": task_name, + "AYON_WORKDIR": workdir, + } # Update the Session and environments. Pop from environments all keys with # value set to None. - for key, value in changes.items(): - legacy_io.Session[key] = value + for key, value in envs.items(): if value is None: os.environ.pop(key, None) else: os.environ[key] = value - data = changes.copy() + data = envs.copy() + # Convert env keys to human readable keys - data["project_name"] = legacy_io.Session["AVALON_PROJECT"] - data["asset_name"] = legacy_io.Session["AVALON_ASSET"] - data["task_name"] = legacy_io.Session["AVALON_TASK"] + data["project_name"] = project_name + data["folder_path"] = get_asset_name_identifier(asset_doc) + data["task_name"] = task_name + data["workdir_path"] = workdir # Emit session change emit_event("taskChanged", data) - return changes + return data def get_process_id(): diff --git a/client/ayon_core/pipeline/create/README.md b/client/ayon_core/pipeline/create/README.md index 012572a776..bbfd1bfa0f 100644 --- a/client/ayon_core/pipeline/create/README.md +++ b/client/ayon_core/pipeline/create/README.md @@ -6,7 +6,7 @@ Entry point of creation. All data and metadata are handled through create contex Discovers Creator plugins to be able create new instances and convert existing instances. Creators may have defined attributes that are specific for their instances. Attributes definition can enhance behavior of instance during publishing. -Publish plugins are loaded because they can also define attributes definitions. These are less family specific To be able define attributes Publish plugin must inherit from `AYONPyblishPluginMixin` and must override `get_attribute_defs` class method which must return list of attribute definitions. Values of publish plugin definitions are stored per plugin name under `publish_attributes`. Also can override `convert_attribute_values` class method which gives ability to modify values on instance before are used in CreatedInstance. Method `convert_attribute_values` can be also used without `get_attribute_defs` to modify values when changing compatibility (remove metadata from instance because are irrelevant). +Publish plugins are loaded because they can also define attributes definitions. These are less product type specific To be able define attributes Publish plugin must inherit from `AYONPyblishPluginMixin` and must override `get_attribute_defs` class method which must return list of attribute definitions. Values of publish plugin definitions are stored per plugin name under `publish_attributes`. Also can override `convert_attribute_values` class method which gives ability to modify values on instance before are used in CreatedInstance. Method `convert_attribute_values` can be also used without `get_attribute_defs` to modify values when changing compatibility (remove metadata from instance because are irrelevant). Possible attribute definitions can be found in `openpype/pipeline/lib/attribute_definitions.py`. 
@@ -14,9 +14,9 @@ Except creating and removing instances are all changes not automatically propaga ## CreatedInstance -Product of creation is "instance" which holds basic data defying it. Core data are `creator_identifier`, `family` and `subset`. Other data can be keys used to fill subset name or metadata modifying publishing process of the instance (more described later). All instances have `id` which holds constant `pyblish.avalon.instance` and `instance_id` which is identifier of the instance. -Family tells how should be instance processed and subset what name will published item have. -- There are cases when subset is not fully filled during creation and may change during publishing. That is in most of cases caused because instance is related to other instance or instance data do not represent final product. +Product of creation is "instance" which holds basic data defining it. Core data are `creator_identifier`, `productType` and `productName`. Other data can be keys used to fill the product name or metadata modifying the publishing process of the instance (more described later). All instances have `id` which holds the constant `ayon.create.instance` or `pyblish.avalon.instance` (for backwards compatibility) and `instance_id` which is the identifier of the instance. +Product type tells how the instance should be processed and product name what name the published item will have. +- There are cases when the product name is not fully filled during creation and may change during publishing. That is in most cases caused by the instance being related to another instance, or by the instance data not representing the final product. `CreatedInstance` is entity holding the data which are stored and used. @@ -24,15 +24,15 @@ Family tells how should be instance processed and subset what name will publishe { # Immutable data after creation ## Identifier that this data represents instance for publishing (automatically assigned) - "id": "pyblish.avalon.instance", + "id": "ayon.create.instance", ## Identifier of this specific instance (automatically assigned) "instance_id": , - ## Instance family (used from Creator) - "family": , + ## Instance product type (used from Creator) + "productType": , # Mutable data - ## Subset name based on subset name template - may change overtime (on context change) - "subset": , + ## Product name based on product name template - may change overtime (on context change) + "productName": , ## Instance is active and will be published "active": True, ## Version of instance @@ -48,13 +48,13 @@ Family tells how should be instance processed and subset what name will publishe : {...}, ... }, - ## Additional data related to instance (`asset`, `task`, etc.) + ## Additional data related to instance (`folderPath`, `task`, etc.) ... } ``` ## Creator -To be able create, update, remove or collect existing instances there must be defined a creator. Creator must have unique identifier and can represents a family. There can be multiple Creators for single family. Identifier of creator should contain family (advise). +To be able to create, update, remove or collect existing instances there must be a creator defined. A Creator must have a unique identifier and can represent a product type. There can be multiple Creators for a single product type. The identifier of a creator should contain the product type (advised). Creator has abstract methods to handle instances. For new instance creation is used `create` which should create metadata in host context and add new instance object to `CreateContext`. 
To collect existing instances is used `collect_instances` which should find all existing instances related to creator and add them to `CreateContext`. To update data of instance is used `update_instances` which is called from `CreateContext` on `save_changes`. To remove instance use `remove_instances` which should remove metadata from host context and remove instance from `CreateContext`. diff --git a/client/ayon_core/pipeline/create/__init__.py b/client/ayon_core/pipeline/create/__init__.py index 94d575a776..da9cafad5a 100644 --- a/client/ayon_core/pipeline/create/__init__.py +++ b/client/ayon_core/pipeline/create/__init__.py @@ -1,6 +1,6 @@ from .constants import ( - SUBSET_NAME_ALLOWED_SYMBOLS, - DEFAULT_SUBSET_TEMPLATE, + PRODUCT_NAME_ALLOWED_SYMBOLS, + DEFAULT_PRODUCT_TEMPLATE, PRE_CREATE_THUMBNAIL_KEY, DEFAULT_VARIANT_VALUE, ) @@ -10,10 +10,10 @@ from .utils import ( get_next_versions_for_instances, ) -from .subset_name import ( +from .product_name import ( TaskNotSetError, - get_subset_name_template, - get_subset_name, + get_product_name, + get_product_name_template, ) from .creator_plugins import ( @@ -48,8 +48,8 @@ from .legacy_create import ( __all__ = ( - "SUBSET_NAME_ALLOWED_SYMBOLS", - "DEFAULT_SUBSET_TEMPLATE", + "PRODUCT_NAME_ALLOWED_SYMBOLS", + "DEFAULT_PRODUCT_TEMPLATE", "PRE_CREATE_THUMBNAIL_KEY", "DEFAULT_VARIANT_VALUE", @@ -57,8 +57,8 @@ __all__ = ( "get_next_versions_for_instances", "TaskNotSetError", - "get_subset_name_template", - "get_subset_name", + "get_product_name", + "get_product_name_template", "CreatorError", diff --git a/client/ayon_core/pipeline/create/constants.py b/client/ayon_core/pipeline/create/constants.py index 7d1d0154e9..a0bcea55ff 100644 --- a/client/ayon_core/pipeline/create/constants.py +++ b/client/ayon_core/pipeline/create/constants.py @@ -1,12 +1,12 @@ -SUBSET_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_." -DEFAULT_SUBSET_TEMPLATE = "{family}{Variant}" +PRODUCT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_." 
+DEFAULT_PRODUCT_TEMPLATE = "{family}{Variant}" PRE_CREATE_THUMBNAIL_KEY = "thumbnail_source" DEFAULT_VARIANT_VALUE = "Main" __all__ = ( - "SUBSET_NAME_ALLOWED_SYMBOLS", - "DEFAULT_SUBSET_TEMPLATE", + "PRODUCT_NAME_ALLOWED_SYMBOLS", + "DEFAULT_PRODUCT_TEMPLATE", "PRE_CREATE_THUMBNAIL_KEY", "DEFAULT_VARIANT_VALUE", ) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 8990d50324..425de4305f 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -16,10 +16,7 @@ from ayon_core.client import ( get_asset_by_name, get_asset_name_identifier, ) -from ayon_core.settings import ( - get_system_settings, - get_project_settings -) +from ayon_core.settings import get_project_settings from ayon_core.lib.attribute_definitions import ( UnknownDef, serialize_attr_defs, @@ -27,7 +24,11 @@ from ayon_core.lib.attribute_definitions import ( get_default_values, ) from ayon_core.host import IPublishHost, IWorkfileHost -from ayon_core.pipeline import legacy_io, Anatomy +from ayon_core.pipeline import ( + Anatomy, + AYON_INSTANCE_ID, + AVALON_INSTANCE_ID, +) from ayon_core.pipeline.plugin_discover import DiscoverResult from .creator_plugins import ( @@ -90,7 +91,7 @@ class ConvertorsOperationFailed(Exception): class ConvertorsFindFailed(ConvertorsOperationFailed): def __init__(self, failed_info): - msg = "Failed to find incompatible subsets" + msg = "Failed to find incompatible products" super(ConvertorsFindFailed, self).__init__( msg, failed_info ) @@ -98,7 +99,7 @@ class ConvertorsFindFailed(ConvertorsOperationFailed): class ConvertorsConversionFailed(ConvertorsOperationFailed): def __init__(self, failed_info): - msg = "Failed to convert incompatible subsets" + msg = "Failed to convert incompatible products" super(ConvertorsConversionFailed, self).__init__( msg, failed_info ) @@ -853,8 +854,8 @@ class CreatedInstance: """Instance entity with data that will be stored to workfile. I think `data` must be required argument containing all minimum information - about instance like "asset" and "task" and all data used for filling subset - name as creators may have custom data for subset name filling. + about instance like "asset" and "task" and all data used for filling + product name as creators may have custom data for product name filling. Notes: Object have 2 possible initialization. One using 'creator' object which @@ -862,9 +863,9 @@ class CreatedInstance: creator. Args: - family (str): Name of family that will be created. - subset_name (str): Name of subset that will be created. - data (Dict[str, Any]): Data used for filling subset name or override + product_type (str): Product type that will be created. + product_name (str): Name of product that will be created. + data (Dict[str, Any]): Data used for filling product name or override data from already existing instance. creator (Union[BaseCreator, None]): Creator responsible for instance. creator_identifier (str): Identifier of creator plugin. 
@@ -881,7 +882,7 @@ class CreatedInstance: __immutable_keys = ( "id", "instance_id", - "family", + "product_type", "creator_identifier", "creator_attributes", "publish_attributes" @@ -889,8 +890,8 @@ class CreatedInstance: def __init__( self, - family, - subset_name, + product_type, + product_name, data, creator=None, creator_identifier=None, @@ -925,8 +926,10 @@ class CreatedInstance: # Store original value of passed data self._orig_data = copy.deepcopy(data) - # Pop family and subset to prevent unexpected changes - # TODO change to 'productType' and 'productName' in AYON + # Pop 'productType' and 'productName' to prevent unexpected changes + data.pop("productType", None) + data.pop("productName", None) + # Backwards compatibility with OpenPype instances data.pop("family", None) data.pop("subset", None) @@ -937,9 +940,13 @@ class CreatedInstance: # QUESTION Does it make sense to have data stored as ordered dict? self._data = collections.OrderedDict() # QUESTION Do we need this "id" information on instance? - self._data["id"] = "pyblish.avalon.instance" - self._data["family"] = family - self._data["subset"] = subset_name + item_id = data.get("id") + # TODO use only 'AYON_INSTANCE_ID' when all hosts support it + if item_id not in {AYON_INSTANCE_ID, AVALON_INSTANCE_ID}: + item_id = AVALON_INSTANCE_ID + self._data["id"] = item_id + self._data["productType"] = product_type + self._data["productName"] = product_name self._data["active"] = data.get("active", True) self._data["creator_identifier"] = creator_identifier @@ -980,12 +987,11 @@ class CreatedInstance: def __str__(self): return ( - "" - " {data}" + " {data}" ).format( - subset=str(self._data), creator_identifier=self.creator_identifier, - family=self.family, + product={"name": self.product_name, "type": self.product_type}, data=str(self._data) ) @@ -1026,18 +1032,18 @@ class CreatedInstance: # ------ @property - def family(self): - return self._data["family"] + def product_type(self): + return self._data["productType"] @property - def subset_name(self): - return self._data["subset"] + def product_name(self): + return self._data["productName"] @property def label(self): label = self._data.get("label") if not label: - label = self.subset_name + label = self.product_name return label @property @@ -1184,13 +1190,17 @@ class CreatedInstance: instance_data = copy.deepcopy(instance_data) - family = instance_data.get("family", None) - if family is None: - family = creator.family - subset_name = instance_data.get("subset", None) + product_type = instance_data.get("productType") + if product_type is None: + product_type = instance_data.get("family") + if product_type is None: + product_type = creator.product_type + product_name = instance_data.get("productName") + if product_name is None: + product_name = instance_data.get("subset") return cls( - family, subset_name, instance_data, creator + product_type, product_name, instance_data, creator ) def set_publish_plugins(self, attr_plugins): @@ -1247,8 +1257,8 @@ class CreatedInstance: instance_data = copy.deepcopy(serialized_data["data"]) creator_identifier = instance_data["creator_identifier"] - family = instance_data["family"] - subset_name = instance_data.get("subset", None) + product_type = instance_data["productType"] + product_name = instance_data.get("productName", None) creator_label = serialized_data["creator_label"] group_label = serialized_data["group_label"] @@ -1258,8 +1268,8 @@ class CreatedInstance: publish_attributes = serialized_data["publish_attributes"] obj = cls( - family, - 
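A standalone sketch of the instance-id fallback from the `__init__` hunk above. The `AVALON_INSTANCE_ID` value matches the string that was previously hardcoded; the `AYON_INSTANCE_ID` value is an assumption about `ayon_core.pipeline.constants`, not confirmed by this diff.

```python
# AVALON_INSTANCE_ID mirrors the string previously hardcoded in '__init__';
# the AYON value below is assumed for illustration.
AVALON_INSTANCE_ID = "pyblish.avalon.instance"
AYON_INSTANCE_ID = "ayon.create.instance"

def resolve_instance_id(data):
    # Unknown or missing ids fall back to the Avalon id until all hosts
    # understand the AYON one.
    item_id = data.get("id")
    if item_id not in {AYON_INSTANCE_ID, AVALON_INSTANCE_ID}:
        item_id = AVALON_INSTANCE_ID
    return item_id

print(resolve_instance_id({}))                        # -> pyblish.avalon.instance
print(resolve_instance_id({"id": AYON_INSTANCE_ID}))  # -> ayon.create.instance
```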
subset_name, + product_type, + product_name, instance_data, creator_identifier=creator_identifier, creator_label=creator_label, @@ -1423,7 +1433,7 @@ class CreateContext: self.publish_plugins_mismatch_targets = [] self.publish_plugins = [] self.plugins_with_defs = [] - self._attr_plugins_by_family = {} + self._attr_plugins_by_product_type = {} # Helpers for validating context of collected instances # - they can be validation for multiple instances at one time @@ -1536,7 +1546,7 @@ class CreateContext: def host_name(self): if hasattr(self.host, "name"): return self.host.name - return os.environ["AVALON_APP"] + return os.environ["AYON_HOST_NAME"] def get_current_project_name(self): """Project name which was used as current context on context reset. @@ -1678,31 +1688,22 @@ class CreateContext: host_context = self.host.get_current_context() if host_context: project_name = host_context.get("project_name") - asset_name = host_context.get("asset_name") + asset_name = host_context.get("folder_path") task_name = host_context.get("task_name") if isinstance(self.host, IWorkfileHost): workfile_path = self.host.get_current_workfile() - # --- TODO remove these conditions --- - if not project_name: - project_name = legacy_io.Session.get("AVALON_PROJECT") - if not asset_name: - asset_name = legacy_io.Session.get("AVALON_ASSET") - if not task_name: - task_name = legacy_io.Session.get("AVALON_TASK") - # --- return project_name, asset_name, task_name, workfile_path def reset_current_context(self): """Refresh current context. Reset is based on optional host implementation of `get_current_context` - function or using `legacy_io.Session`. + function. Some hosts have ability to change context file without using workfiles - tool but that change is not propagated to 'legacy_io.Session' - nor 'os.environ'. + tool but that change is not propagated to 'os.environ'. Todos: UI: Current context should be also checked on save - compare @@ -1741,7 +1742,7 @@ class CreateContext: ) # Reset publish plugins - self._attr_plugins_by_family = {} + self._attr_plugins_by_product_type = {} discover_result = DiscoverResult(pyblish.api.Plugin) plugins_with_defs = [] @@ -1775,7 +1776,6 @@ class CreateContext: def _reset_creator_plugins(self): # Prepare settings - system_settings = get_system_settings() project_settings = get_project_settings(self.project_name) # Discover and prepare creators @@ -1813,7 +1813,6 @@ class CreateContext: creator = creator_class( project_settings, - system_settings, self, self.headless ) @@ -1913,8 +1912,8 @@ class CreateContext: self._instances_by_id[instance.id] = instance # Prepare publish plugin attributes and set it on instance - attr_plugins = self._get_publish_plugins_with_attr_for_family( - instance.family + attr_plugins = self._get_publish_plugins_with_attr_for_product_type( + instance.product_type ) instance.set_publish_plugins(attr_plugins) @@ -1961,11 +1960,11 @@ class CreateContext: values. If only 'task_name' is provided it will be overriden by task name from current context. If 'task_name' is not provided when 'asset_doc' is, it is considered that task name is not specified, - which can lead to error if subset name template requires task name. + which can lead to error if product name template requires task name. Args: creator_identifier (str): Identifier of creator plugin. - variant (str): Variant used for subset name. + variant (str): Variant used for product name. asset_doc (Dict[str, Any]): Asset document which define context of creation (possible context of created instance/s). 
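A minimal sketch of the new host-name lookup used by `CreateContext`: prefer the host integration's own `name` attribute, otherwise read `AYON_HOST_NAME` (the `AVALON_APP` variable is no longer consulted). The dummy host class exists only for this example.

```python
import os

def get_host_name(host):
    # Prefer the host integration's own name when it exposes one.
    if hasattr(host, "name"):
        return host.name
    # Fallback: environment variable set by the AYON launcher.
    return os.environ["AYON_HOST_NAME"]


class _DummyHost:
    name = "maya"


print(get_host_name(_DummyHost()))  # -> "maya"
```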
task_name (str): Name of task to which is context related. @@ -2004,23 +2003,23 @@ class CreateContext: # TODO validate types _pre_create_data.update(pre_create_data) - subset_name = creator.get_subset_name( - variant, - task_name, - asset_doc, + product_name = creator.get_product_name( project_name, - self.host_name + asset_doc, + task_name, + variant, + self.host_name, ) asset_name = get_asset_name_identifier(asset_doc) instance_data = { "folderPath": asset_name, "task": task_name, - "family": creator.family, + "productType": creator.product_type, "variant": variant } return creator.create( - subset_name, + product_name, instance_data, _pre_create_data ) @@ -2427,29 +2426,29 @@ class CreateContext: if failed_info: raise CreatorsRemoveFailed(failed_info) - def _get_publish_plugins_with_attr_for_family(self, family): - """Publish plugin attributes for passed family. + def _get_publish_plugins_with_attr_for_product_type(self, product_type): + """Publish plugin attributes for passed product type. - Attribute definitions for specific family are cached. + Attribute definitions for specific product type are cached. Args: - family(str): Instance family for which should be attribute - definitions returned. + product_type(str): Instance product type for which should be + attribute definitions returned. """ - if family not in self._attr_plugins_by_family: + if product_type not in self._attr_plugins_by_product_type: import pyblish.logic filtered_plugins = pyblish.logic.plugins_by_families( - self.plugins_with_defs, [family] + self.plugins_with_defs, [product_type] ) plugins = [] for plugin in filtered_plugins: if plugin.__instanceEnabled__: plugins.append(plugin) - self._attr_plugins_by_family[family] = plugins + self._attr_plugins_by_product_type[product_type] = plugins - return self._attr_plugins_by_family[family] + return self._attr_plugins_by_product_type[product_type] def _get_publish_plugins_with_attr_for_context(self): """Publish plugins attributes for Context plugins. diff --git a/client/ayon_core/pipeline/create/creator_plugins.py b/client/ayon_core/pipeline/create/creator_plugins.py index 6fa0d2ffa1..cb8e4a2d1c 100644 --- a/client/ayon_core/pipeline/create/creator_plugins.py +++ b/client/ayon_core/pipeline/create/creator_plugins.py @@ -6,8 +6,8 @@ from abc import ABCMeta, abstractmethod import six -from ayon_core.settings import get_system_settings, get_project_settings -from ayon_core.lib import Logger, is_func_signature_supported +from ayon_core.settings import get_project_settings +from ayon_core.lib import Logger from ayon_core.pipeline.plugin_discover import ( discover, register_plugin, @@ -17,7 +17,7 @@ from ayon_core.pipeline.plugin_discover import ( ) from .constants import DEFAULT_VARIANT_VALUE -from .subset_name import get_subset_name +from .product_name import get_product_name from .utils import get_next_versions_for_instances from .legacy_create import LegacyCreator @@ -179,7 +179,7 @@ class BaseCreator: # Creator is enabled (Probably does not have reason of existence?) 
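The publish-plugin attribute cache above now keys on product type, but the underlying pyblish call is unchanged: `pyblish.logic.plugins_by_families` simply receives the product type string. A small sketch; the plugin class is illustrative, real plugins come from discovery.

```python
import pyblish.api
import pyblish.logic

# Illustrative plugin; in production these are discovered publish plugins.
class ValidateRender(pyblish.api.InstancePlugin):
    families = ["render"]

# The same pyblish filtering call, fed with a product type instead of a family.
filtered = pyblish.logic.plugins_by_families([ValidateRender], ["render"])
print([plugin.__name__ for plugin in filtered])  # -> ['ValidateRender']
```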
enabled = True - # Creator (and family) icon + # Creator (and product type) icon # - may not be used if `get_icon` is reimplemented icon = None @@ -201,7 +201,7 @@ class BaseCreator: settings_name = None def __init__( - self, project_settings, system_settings, create_context, headless=False + self, project_settings, create_context, headless=False ): # Reference to CreateContext self.create_context = create_context @@ -211,34 +211,7 @@ class BaseCreator: # - we may use UI inside processing this attribute should be checked self.headless = headless - expect_system_settings = False - if is_func_signature_supported( - self.apply_settings, project_settings - ): - self.apply_settings(project_settings) - else: - expect_system_settings = True - # Backwards compatibility for system settings - self.apply_settings(project_settings, system_settings) - - init_use_base = any( - self.__class__.__init__ is cls.__init__ - for cls in { - BaseCreator, - Creator, - HiddenCreator, - AutoCreator, - } - ) - if not init_use_base or expect_system_settings: - self.log.warning(( - "WARNING: Source - Create plugin {}." - " System settings argument will not be passed to" - " '__init__' and 'apply_settings' methods in future versions" - " of OpenPype. Planned version to drop the support" - " is 3.16.6 or 3.17.0. Please contact Ynput core team if you" - " need to keep system settings." - ).format(self.__class__.__name__)) + self.apply_settings(project_settings) @staticmethod def _get_settings_values(project_settings, category_name, plugin_name): @@ -329,14 +302,14 @@ class BaseCreator: def identifier(self): """Identifier of creator (must be unique). - Default implementation returns plugin's family. + Default implementation returns plugin's product type. """ - return self.family + return self.product_type @property @abstractmethod - def family(self): + def product_type(self): """Family that plugin represents.""" pass @@ -370,7 +343,7 @@ class BaseCreator: Default implementation use attributes in this order: - 'group_label' -> 'label' -> 'identifier' - Keep in mind that 'identifier' use 'family' by default. + Keep in mind that 'identifier' use 'product_type' by default. Returns: str: Group label that can be used for grouping of instances in UI. @@ -489,7 +462,7 @@ class BaseCreator: pass def get_icon(self): - """Icon of creator (family). + """Icon of creator (product type). Can return path to image file or awesome icon name. """ @@ -497,9 +470,15 @@ class BaseCreator: return self.icon def get_dynamic_data( - self, variant, task_name, asset_doc, project_name, host_name, instance + self, + project_name, + asset_doc, + task_name, + variant, + host_name, + instance ): - """Dynamic data for subset name filling. + """Dynamic data for product name filling. These may be get dynamically created based on current context of workfile. @@ -507,16 +486,16 @@ class BaseCreator: return {} - def get_subset_name( + def get_product_name( self, - variant, - task_name, - asset_doc, project_name, + asset_doc, + task_name, + variant, host_name=None, instance=None ): - """Return subset name for passed context. + """Return product name for passed context. CHANGES: Argument `asset_id` was replaced with `asset_doc`. It is easier to @@ -526,33 +505,41 @@ class BaseCreator: NOTE: Asset document is not used yet but is required if would like to use - task type in subset templates. + task type in product templates. - Method is also called on subset name update. In that case origin + Method is also called on product name update. 
In that case origin instance is passed in. Args: - variant(str): Subset name variant. In most of cases user input. - task_name(str): For which task subset is created. - asset_doc(dict): Asset document for which subset is created. - project_name(str): Project name. - host_name(str): Which host creates subset. - instance(CreatedInstance|None): Object of 'CreatedInstance' for - which is subset name updated. Passed only on subset name + project_name (str): Project name. + asset_doc (dict): Asset document for which product is created. + task_name (str): For which task product is created. + variant (str): Product name variant. In most of cases user input. + host_name (Optional[str]): Which host creates product. Defaults + to host name on create context. + instance (Optional[CreatedInstance]): Object of 'CreatedInstance' + for which is product name updated. Passed only on product name update. """ + if host_name is None: + host_name = self.create_context.host_name dynamic_data = self.get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name, instance + project_name, + asset_doc, + task_name, + variant, + host_name, + instance ) - return get_subset_name( - self.family, - variant, - task_name, - asset_doc, + return get_product_name( project_name, + asset_doc, + task_name, host_name, + self.product_type, + variant, dynamic_data=dynamic_data, project_settings=self.project_settings ) @@ -599,8 +586,8 @@ class BaseCreator: """Prepare next versions for instances. This is helper method to receive next possible versions for instances. - It is using context information on instance to receive them, 'asset' - and 'subset'. + It is using context information on instance to receive them, + 'folderPath' and 'product'. Output will contain version by each instance id. @@ -620,7 +607,7 @@ class BaseCreator: class Creator(BaseCreator): """Creator that has more information for artist to show in UI. - Creation requires prepared subset name and instance data. + Creation requires prepared product name and instance data. """ # GUI Purposes @@ -630,11 +617,11 @@ class Creator(BaseCreator): # Default variant used in 'get_default_variant' _default_variant = None - # Short description of family + # Short description of product type # - may not be used if `get_description` is overriden description = None - # Detailed description of family for artists + # Detailed description of product type for artists # - may not be used if `get_detail_description` is overriden detailed_description = None @@ -681,39 +668,39 @@ class Creator(BaseCreator): return self.order @abstractmethod - def create(self, subset_name, instance_data, pre_create_data): + def create(self, product_name, instance_data, pre_create_data): """Create new instance and store it. Ideally should be stored to workfile using host implementation. Args: - subset_name(str): Subset name of created instance. + product_name(str): Product name of created instance. instance_data(dict): Base data for instance. pre_create_data(dict): Data based on pre creation attributes. Those may affect how creator works. """ # instance = CreatedInstance( - # self.family, subset_name, instance_data + # self.product_type, product_name, instance_data # ) pass def get_description(self): - """Short description of family and plugin. + """Short description of product type and plugin. Returns: - str: Short description of family. + str: Short description of product type. """ return self.description def get_detail_description(self): - """Description of family and plugin. 
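To make the reordered signature of `get_product_name` concrete, here is a self-contained stub that only mirrors the new argument order and fills the default template. The product type is hardcoded and the capitalization is done by hand; the real implementation resolves the template from settings profiles and uses `prepare_template_data` for the case variants.

```python
# Default template value taken from constants.py in this diff.
DEFAULT_PRODUCT_TEMPLATE = "{family}{Variant}"

def get_product_name(project_name, asset_doc, task_name, variant, host_name=None):
    """Stub mirroring the new argument order.

    The old order was (variant, task_name, asset_doc, project_name, host_name);
    calling with keywords is the safest way to migrate existing call sites.
    """
    fill_pairs = {
        "family": "render",              # product type, hardcoded for the sketch
        "variant": variant,
        "Variant": variant.capitalize(),
        "task": task_name,
    }
    return DEFAULT_PRODUCT_TEMPLATE.format(**fill_pairs)

print(get_product_name("demo_project", {}, "lighting", "main"))  # -> renderMain
```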
+ """Description of product type and plugin. Can be detailed with markdown or html tags. Returns: - str: Detailed description of family for artist. + str: Detailed description of product type for artist. """ return self.detailed_description @@ -838,11 +825,10 @@ def discover_legacy_creator_plugins(): plugins = discover(LegacyCreator) project_name = get_current_project_name() - system_settings = get_system_settings() project_settings = get_project_settings(project_name) for plugin in plugins: try: - plugin.apply_settings(project_settings, system_settings) + plugin.apply_settings(project_settings) except Exception: log.warning( "Failed to apply settings to creator {}".format( diff --git a/client/ayon_core/pipeline/create/legacy_create.py b/client/ayon_core/pipeline/create/legacy_create.py index 08be32eed4..5e23a74a79 100644 --- a/client/ayon_core/pipeline/create/legacy_create.py +++ b/client/ayon_core/pipeline/create/legacy_create.py @@ -10,42 +10,44 @@ import logging import collections from ayon_core.client import get_asset_by_id +from ayon_core.pipeline.constants import AVALON_INSTANCE_ID -from .subset_name import get_subset_name +from .product_name import get_product_name class LegacyCreator(object): """Determine how assets are created""" label = None - family = None + product_type = None defaults = None maintain_selection = True enabled = True - dynamic_subset_keys = [] + dynamic_product_name_keys = [] log = logging.getLogger("LegacyCreator") log.propagate = True - def __init__(self, name, asset, options=None, data=None): + def __init__(self, name, folder_path, options=None, data=None): self.name = name # For backwards compatibility self.options = options # Default data self.data = collections.OrderedDict() - self.data["id"] = "pyblish.avalon.instance" - self.data["family"] = self.family - self.data["asset"] = asset - self.data["subset"] = name + # TODO use 'AYON_INSTANCE_ID' when all hosts support it + self.data["id"] = AVALON_INSTANCE_ID + self.data["productType"] = self.product_type + self.data["folderPath"] = folder_path + self.data["productName"] = name self.data["active"] = True self.data.update(data or {}) @classmethod - def apply_settings(cls, project_settings, system_settings): + def apply_settings(cls, project_settings): """Apply OpenPype settings to a plugin class.""" - host_name = os.environ.get("AVALON_APP") + host_name = os.environ.get("AYON_HOST_NAME") plugin_type = "create" plugin_type_settings = ( project_settings @@ -54,7 +56,7 @@ class LegacyCreator(object): ) global_type_settings = ( project_settings - .get("global", {}) + .get("core", {}) .get(plugin_type, {}) ) if not global_type_settings and not plugin_type_settings: @@ -87,23 +89,23 @@ class LegacyCreator(object): @classmethod def get_dynamic_data( - cls, variant, task_name, asset_id, project_name, host_name + cls, project_name, asset_id, task_name, variant, host_name ): """Return dynamic data for current Creator plugin. - By default return keys from `dynamic_subset_keys` attribute as mapping - to keep formatted template unchanged. + By default return keys from `dynamic_product_name_keys` attribute + as mapping to keep formatted template unchanged. ``` - dynamic_subset_keys = ["my_key"] + dynamic_product_name_keys = ["my_key"] --- output = { "my_key": "{my_key}" } ``` - Dynamic keys may override default Creator keys (family, task, asset, - ...) but do it wisely if you need. + Dynamic keys may override default Creator keys (productType, task, + folderPath, ...) but do it wisely if you need. 
All of keys will be converted into 3 variants unchanged, capitalized and all upper letters. Because of that are all keys lowered. @@ -112,54 +114,54 @@ class LegacyCreator(object): is class method. Returns: - dict: Fill data for subset name template. + dict: Fill data for product name template. """ dynamic_data = {} - for key in cls.dynamic_subset_keys: + for key in cls.dynamic_product_name_keys: key = key.lower() dynamic_data[key] = "{" + key + "}" return dynamic_data @classmethod - def get_subset_name( - cls, variant, task_name, asset_id, project_name, host_name=None + def get_product_name( + cls, project_name, asset_id, task_name, variant, host_name=None ): - """Return subset name created with entered arguments. + """Return product name created with entered arguments. Logic extracted from Creator tool. This method should give ability - to get subset name without the tool. + to get product name without the tool. TODO: Maybe change `variant` variable. - By default is output concatenated family with user text. + By default is output concatenated product type with variant. Args: - variant (str): What is entered by user in creator tool. - task_name (str): Context's task name. - asset_id (ObjectId): Mongo ID of context's asset. project_name (str): Context's project name. + asset_id (str): Folder id. + task_name (str): Context's task name. + variant (str): What is entered by user in creator tool. host_name (str): Name of host. Returns: - str: Formatted subset name with entered arguments. Should match + str: Formatted product name with entered arguments. Should match config's logic. """ dynamic_data = cls.get_dynamic_data( - variant, task_name, asset_id, project_name, host_name + project_name, asset_id, task_name, variant, host_name ) asset_doc = get_asset_by_id( project_name, asset_id, fields=["data.tasks"] ) - return get_subset_name( - cls.family, - variant, - task_name, - asset_doc, + return get_product_name( project_name, + asset_doc, + task_name, host_name, + cls.product_type, + variant, dynamic_data=dynamic_data ) @@ -167,23 +169,23 @@ class LegacyCreator(object): def legacy_create(Creator, name, asset, options=None, data=None): """Create a new instance - Associate nodes with a subset and family. These nodes are later - validated, according to their `family`, and integrated into the - shared environment, relative their `subset`. + Associate nodes with a product name and type. These nodes are later + validated, according to their `product type`, and integrated into the + shared environment, relative their `productName`. - Data relative each family, along with default data, are imprinted + Data relative each product type, along with default data, are imprinted into the resulting objectSet. This data is later used by extractors and finally asset browsers to help identify the origin of the asset. 
Arguments: Creator (Creator): Class of creator - name (str): Name of subset + name (str): Name of product asset (str): Name of asset options (dict, optional): Additional options from GUI data (dict, optional): Additional data from GUI Raises: - NameError on `subset` already exists + NameError on `productName` already exists KeyError on invalid dynamic property RuntimeError on host error diff --git a/client/ayon_core/pipeline/create/subset_name.py b/client/ayon_core/pipeline/create/product_name.py similarity index 57% rename from client/ayon_core/pipeline/create/subset_name.py rename to client/ayon_core/pipeline/create/product_name.py index 3892971ce8..8413bfa9d8 100644 --- a/client/ayon_core/pipeline/create/subset_name.py +++ b/client/ayon_core/pipeline/create/product_name.py @@ -2,56 +2,55 @@ import os from ayon_core.settings import get_project_settings from ayon_core.lib import filter_profiles, prepare_template_data -from ayon_core.pipeline import legacy_io -from .constants import DEFAULT_SUBSET_TEMPLATE +from .constants import DEFAULT_PRODUCT_TEMPLATE class TaskNotSetError(KeyError): def __init__(self, msg=None): if not msg: - msg = "Creator's subset name template requires task name." + msg = "Creator's product name template requires task name." super(TaskNotSetError, self).__init__(msg) class TemplateFillError(Exception): def __init__(self, msg=None): if not msg: - msg = "Creator's subset name template is missing key value." + msg = "Creator's product name template is missing key value." super(TemplateFillError, self).__init__(msg) -def get_subset_name_template( +def get_product_name_template( project_name, - family, + product_type, task_name, task_type, host_name, default_template=None, project_settings=None ): - """Get subset name template based on passed context. + """Get product name template based on passed context. Args: project_name (str): Project on which the context lives. - family (str): Family (subset type) for which the subset name is + product_type (str): Product type for which the subset name is calculated. - host_name (str): Name of host in which the subset name is calculated. - task_name (str): Name of task in which context the subset is created. - task_type (str): Type of task in which context the subset is created. + host_name (str): Name of host in which the product name is calculated. + task_name (str): Name of task in which context the product is created. + task_type (str): Type of task in which context the product is created. default_template (Union[str, None]): Default template which is used if settings won't find any matching possitibility. Constant - 'DEFAULT_SUBSET_TEMPLATE' is used if not defined. + 'DEFAULT_PRODUCT_TEMPLATE' is used if not defined. project_settings (Union[Dict[str, Any], None]): Prepared settings for project. Settings are queried if not passed. 
""" if project_settings is None: project_settings = get_project_settings(project_name) - tools_settings = project_settings["global"]["tools"] - profiles = tools_settings["creator"]["subset_name_profiles"] + tools_settings = project_settings["core"]["tools"] + profiles = tools_settings["creator"]["product_name_profiles"] filtering_criteria = { - "families": family, + "product_types": product_type, "hosts": host_name, "tasks": task_name, "task_types": task_type @@ -60,90 +59,86 @@ def get_subset_name_template( matching_profile = filter_profiles(profiles, filtering_criteria) template = None if matching_profile: - template = matching_profile["template"] + # TODO remove formatting keys replacement + template = ( + matching_profile["template"] + .replace("{task[name]}", "{task}") + .replace("{Task[name]}", "{Task}") + .replace("{TASK[NAME]}", "{TASK}") + .replace("{product[type]}", "{family}") + .replace("{Product[type]}", "{Family}") + .replace("{PRODUCT[TYPE]}", "{FAMILY}") + .replace("{folder[name]}", "{asset}") + .replace("{Folder[name]}", "{Asset}") + .replace("{FOLDER[NAME]}", "{ASSET}") + ) # Make sure template is set (matching may have empty string) if not template: - template = default_template or DEFAULT_SUBSET_TEMPLATE + template = default_template or DEFAULT_PRODUCT_TEMPLATE return template -def get_subset_name( - family, - variant, - task_name, +def get_product_name( + project_name, asset_doc, - project_name=None, - host_name=None, + task_name, + host_name, + product_type, + variant, default_template=None, dynamic_data=None, project_settings=None, - family_filter=None, + product_type_filter=None, ): - """Calculate subset name based on passed context and OpenPype settings. + """Calculate product name based on passed context and AYON settings. Subst name templates are defined in `project_settings/global/tools/creator - /subset_name_profiles` where are profiles with host name, family, task name - and task type filters. If context does not match any profile then - `DEFAULT_SUBSET_TEMPLATE` is used as default template. + /product_name_profiles` where are profiles with host name, product type, + task name and task type filters. If context does not match any profile + then `DEFAULT_PRODUCT_TEMPLATE` is used as default template. - That's main reason why so many arguments are required to calculate subset + That's main reason why so many arguments are required to calculate product name. - Option to pass family filter was added for special cases when creator or - automated publishing require special subset name template which would be - hard to maintain using its family value. - Why not just pass the right family? -> Family is also used as fill - value and for filtering of publish plugins. - Todos: Find better filtering options to avoid requirement of argument 'family_filter'. Args: - family (str): Instance family. + project_name (str): Project name. + host_name (str): Host name. + product_type (str): Product type. variant (str): In most of the cases it is user input during creation. task_name (str): Task name on which context is instance created. asset_doc (dict): Queried asset document with its tasks in data. Used to get task type. - project_name (Optional[str]): Name of project on which is instance - created. Important for project settings that are loaded. - host_name (Optional[str]): One of filtering criteria for template - profile filters. default_template (Optional[str]): Default template if any profile does - not match passed context. 
Constant 'DEFAULT_SUBSET_TEMPLATE' + not match passed context. Constant 'DEFAULT_PRODUCT_TEMPLATE' is used if is not passed. dynamic_data (Optional[Dict[str, Any]]): Dynamic data specific for a creator which creates instance. project_settings (Optional[Union[Dict[str, Any]]]): Prepared settings for project. Settings are queried if not passed. - family_filter (Optional[str]): Use different family for subset template - filtering. Value of 'family' is used when not passed. + product_type_filter (Optional[str]): Use different product type for + product template filtering. Value of `product_type` is used when + not passed. Raises: - TemplateFillError: If filled template contains placeholder key which is not - collected. + TemplateFillError: If filled template contains placeholder key which + is not collected. """ - if not family: + if not product_type: return "" - if not host_name: - host_name = os.environ.get("AVALON_APP") - - # Use only last part of class family value split by dot (`.`) - family = family.rsplit(".", 1)[-1] - - if project_name is None: - project_name = legacy_io.Session["AVALON_PROJECT"] - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} task_info = asset_tasks.get(task_name) or {} task_type = task_info.get("type") - template = get_subset_name_template( + template = get_product_name_template( project_name, - family_filter or family, + product_type_filter or product_type, task_name, task_type, host_name, @@ -155,10 +150,20 @@ def get_subset_name( if not task_name and "{task" in template.lower(): raise TaskNotSetError() + task_value = { + "name": task_name, + "type": task_type, + } + if "{task}" in template.lower(): + task_value = task_name + fill_pairs = { "variant": variant, - "family": family, - "task": task_name + "family": product_type, + "task": task_value, + "product": { + "type": product_type + } } if dynamic_data: # Dynamic data may override default values diff --git a/client/ayon_core/pipeline/create/utils.py b/client/ayon_core/pipeline/create/utils.py index 0547c20c0a..44063bd9ac 100644 --- a/client/ayon_core/pipeline/create/utils.py +++ b/client/ayon_core/pipeline/create/utils.py @@ -11,14 +11,14 @@ from ayon_core.client import ( def get_last_versions_for_instances( project_name, instances, use_value_for_missing=False ): - """Get last versions for instances by their asset and subset name. + """Get last versions for instances by their folder path and product name. Args: project_name (str): Project name. instances (list[CreatedInstance]): Instances to get next versions for. use_value_for_missing (Optional[bool]): Missing values are replaced with negative value if True. Otherwise None is used. -2 is used - for instances without filled asset or subset name. -1 is used + for instances without filled folder or product name. -1 is used for missing entities. 
Returns: @@ -29,72 +29,72 @@ def get_last_versions_for_instances( instance.id: -1 if use_value_for_missing else None for instance in instances } - subset_names_by_asset_name = collections.defaultdict(set) + product_names_by_folder_path = collections.defaultdict(set) instances_by_hierarchy = {} for instance in instances: - asset_name = instance.data.get("asset") - subset_name = instance.subset_name - if not asset_name or not subset_name: + folder_path = instance.data.get("folderPath") + product_name = instance.product_name + if not folder_path or not product_name: if use_value_for_missing: output[instance.id] = -2 continue ( instances_by_hierarchy - .setdefault(asset_name, {}) - .setdefault(subset_name, []) + .setdefault(folder_path, {}) + .setdefault(product_name, []) .append(instance) ) - subset_names_by_asset_name[asset_name].add(subset_name) + product_names_by_folder_path[folder_path].add(product_name) - subset_names = set() - for names in subset_names_by_asset_name.values(): - subset_names |= names + product_names = set() + for names in product_names_by_folder_path.values(): + product_names |= names - if not subset_names: + if not product_names: return output asset_docs = get_assets( project_name, - asset_names=subset_names_by_asset_name.keys(), + asset_names=product_names_by_folder_path.keys(), fields=["name", "_id", "data.parents"] ) - asset_names_by_id = { + folder_paths_by_id = { asset_doc["_id"]: get_asset_name_identifier(asset_doc) for asset_doc in asset_docs } - if not asset_names_by_id: + if not folder_paths_by_id: return output subset_docs = get_subsets( project_name, - asset_ids=asset_names_by_id.keys(), - subset_names=subset_names, + asset_ids=folder_paths_by_id.keys(), + subset_names=product_names, fields=["_id", "name", "parent"] ) subset_docs_by_id = {} for subset_doc in subset_docs: # Filter subset docs by subset names under parent - asset_id = subset_doc["parent"] - asset_name = asset_names_by_id[asset_id] - subset_name = subset_doc["name"] - if subset_name not in subset_names_by_asset_name[asset_name]: + folder_id = subset_doc["parent"] + folder_path = folder_paths_by_id[folder_id] + product_name = subset_doc["name"] + if product_name not in product_names_by_folder_path[folder_path]: continue subset_docs_by_id[subset_doc["_id"]] = subset_doc if not subset_docs_by_id: return output - last_versions_by_subset_id = get_last_versions( + last_versions_by_product_id = get_last_versions( project_name, subset_docs_by_id.keys(), fields=["name", "parent"] ) - for subset_id, version_doc in last_versions_by_subset_id.items(): + for subset_id, version_doc in last_versions_by_product_id.items(): subset_doc = subset_docs_by_id[subset_id] - asset_id = subset_doc["parent"] - asset_name = asset_names_by_id[asset_id] - _instances = instances_by_hierarchy[asset_name][subset_doc["name"]] + folder_id = subset_doc["parent"] + folder_path = folder_paths_by_id[folder_id] + _instances = instances_by_hierarchy[folder_path][subset_doc["name"]] for instance in _instances: output[instance.id] = version_doc["name"] @@ -102,7 +102,7 @@ def get_last_versions_for_instances( def get_next_versions_for_instances(project_name, instances): - """Get next versions for instances by their asset and subset name. + """Get next versions for instances by their folder path and product name. Args: project_name (str): Project name. @@ -110,7 +110,7 @@ def get_next_versions_for_instances(project_name, instances): Returns: dict[str, Union[int, None]]: Next versions by instance id. 
Version is - 'None' if instance has no asset or subset name. + 'None' if instance has no folder path or product name. """ last_versions = get_last_versions_for_instances( diff --git a/client/ayon_core/pipeline/farm/patterning.py b/client/ayon_core/pipeline/farm/patterning.py index 1e4b5bf37d..d7b046113b 100644 --- a/client/ayon_core/pipeline/farm/patterning.py +++ b/client/ayon_core/pipeline/farm/patterning.py @@ -11,7 +11,7 @@ def match_aov_pattern(host_name, aov_patterns, render_file_name): that we have grabbed from `exp_files`. Args: - app (str): Host name. + host_name (str): Host name. aov_patterns (dict): AOV patterns from AOV filters. render_file_name (str): Incoming file name to match against. diff --git a/client/ayon_core/pipeline/farm/pyblish_functions.py b/client/ayon_core/pipeline/farm/pyblish_functions.py index 9423d8501c..c669d95c1e 100644 --- a/client/ayon_core/pipeline/farm/pyblish_functions.py +++ b/client/ayon_core/pipeline/farm/pyblish_functions.py @@ -58,16 +58,16 @@ def remap_source(path, anatomy): return source -def extend_frames(asset, subset, start, end): +def extend_frames(folder_path, product_name, start, end): """Get latest version of asset nad update frame range. Based on minimum and maximum values. Arguments: - asset (str): asset name - subset (str): subset name - start (int): start frame - end (int): end frame + folder_path (str): Folder path. + product_name (str): Product name. + start (int): Start frame. + end (int): End frame. Returns: (int, int): update frame start/end @@ -80,8 +80,8 @@ def extend_frames(asset, subset, start, end): project_name = get_current_project_name() version = get_last_version_by_subset_name( project_name, - subset, - asset_name=asset + product_name, + asset_name=folder_path ) # Set prev start / end frames for comparison @@ -197,8 +197,8 @@ def create_skeleton_instance( if data.get("extendFrames", False): time_data.start, time_data.end = extend_frames( - data["asset"], - data["subset"], + data["folderPath"], + data["productName"], time_data.start, time_data.end, ) @@ -215,20 +215,20 @@ def create_skeleton_instance( log.warning(("Could not find root path for remapping \"{}\". " "This may cause issues.").format(source)) - family = ("render" + product_type = ("render" if "prerender.farm" not in instance.data["families"] else "prerender") - families = [family] + families = [product_type] # pass review to families if marked as review if data.get("review"): families.append("review") instance_skeleton_data = { - "family": family, - "subset": data["subset"], + "productType": product_type, + "productName": data["productName"], "families": families, - "asset": data["asset"], + "folderPath": data["folderPath"], "frameStart": time_data.start, "frameEnd": time_data.end, "handleStart": time_data.handle_start, @@ -321,7 +321,7 @@ def prepare_representations(skeleton_data, exp_files, anatomy, aov_filter, """ representations = [] - host_name = os.environ.get("AVALON_APP", "") + host_name = os.environ.get("AYON_HOST_NAME", "") collections, remainders = clique.assemble(exp_files) log = Logger.get_logger("farm_publishing") @@ -472,8 +472,8 @@ def create_instances_for_aov(instance, skeleton, aov_filter, expected files. """ - # we cannot attach AOVs to other subsets as we consider every - # AOV subset of its own. + # we cannot attach AOVs to other products as we consider every + # AOV product of its own. 
log = Logger.get_logger("farm_publishing") additional_color_data = { @@ -493,7 +493,7 @@ def create_instances_for_aov(instance, skeleton, aov_filter, log.warning(e) additional_color_data["colorspaceTemplate"] = colorspace_template - # if there are subset to attach to and more than one AOV, + # if there are product to attach to and more than one AOV, # we cannot proceed. if ( len(instance.data.get("attachTo", [])) > 0 @@ -501,7 +501,7 @@ def create_instances_for_aov(instance, skeleton, aov_filter, ): raise KnownPublishError( "attaching multiple AOVs or renderable cameras to " - "subset is not supported yet.") + "product is not supported yet.") # create instances for every AOV we found in expected files. # NOTE: this is done for every AOV and every render camera (if @@ -541,10 +541,10 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data, """ # TODO: this needs to be taking the task from context or instance - task = os.environ["AVALON_TASK"] + task = os.environ["AYON_TASK_NAME"] anatomy = instance.context.data["anatomy"] - subset = skeleton["subset"] + s_product_name = skeleton["productName"] cameras = instance.data.get("cameras", []) exp_files = instance.data["expectedFiles"] log = Logger.get_logger("farm_publishing") @@ -570,34 +570,33 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data, ext = cols[0].tail.lstrip(".") col = list(cols[0]) - # create subset name `familyTaskSubset_AOV` + # create product name `` # TODO refactor/remove me - family = skeleton["family"] - if not subset.startswith(family): + product_type = skeleton["productType"] + if not s_product_name.startswith(product_type): group_name = '{}{}{}{}{}'.format( - family, + product_type, task[0].upper(), task[1:], - subset[0].upper(), subset[1:]) + s_product_name[0].upper(), s_product_name[1:]) else: - group_name = subset + group_name = s_product_name # if there are multiple cameras, we need to add camera name expected_filepath = col[0] if isinstance(col, (list, tuple)) else col cams = [cam for cam in cameras if cam in expected_filepath] if cams: for cam in cams: - if aov: - if not aov.startswith(cam): - subset_name = '{}_{}_{}'.format(group_name, cam, aov) - else: - subset_name = "{}_{}".format(group_name, aov) + if not aov: + product_name = '{}_{}'.format(group_name, cam) + elif not aov.startswith(cam): + product_name = '{}_{}_{}'.format(group_name, cam, aov) else: - subset_name = '{}_{}'.format(group_name, cam) + product_name = "{}_{}".format(group_name, aov) else: if aov: - subset_name = '{}_{}'.format(group_name, aov) + product_name = '{}_{}'.format(group_name, aov) else: - subset_name = '{}'.format(group_name) + product_name = '{}'.format(group_name) if isinstance(col, (list, tuple)): staging = os.path.dirname(col[0]) @@ -609,9 +608,9 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data, except ValueError as e: log.warning(e) - log.info("Creating data for: {}".format(subset_name)) + log.info("Creating data for: {}".format(product_name)) - app = os.environ.get("AVALON_APP", "") + app = os.environ.get("AYON_HOST_NAME", "") if isinstance(col, list): render_file_name = os.path.basename(col[0]) @@ -626,7 +625,7 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data, preview = True new_instance = deepcopy(skeleton) - new_instance["subset"] = subset_name + new_instance["productName"] = product_name new_instance["subsetGroup"] = group_name # explicitly disable review by user @@ -777,8 +776,8 @@ def 
create_skeleton_instance_cache(instance): if data.get("extendFrames", False): time_data.start, time_data.end = extend_frames( - data["asset"], - data["subset"], + data["folderPath"], + data["productName"], time_data.start, time_data.end, ) @@ -795,17 +794,18 @@ def create_skeleton_instance_cache(instance): log.warning(("Could not find root path for remapping \"{}\". " "This may cause issues.").format(source)) - family = instance.data["family"] + product_type = instance.data["productType"] # Make sure "render" is in the families to go through # validating expected and rendered files # during publishing job. - families = ["render", family] + families = ["render", product_type] instance_skeleton_data = { - "family": family, - "subset": data["subset"], + "productName": data["productName"], + "productType": product_type, + "family": product_type, "families": families, - "asset": data["asset"], + "folderPath": data["folderPath"], "frameStart": time_data.start, "frameEnd": time_data.end, "handleStart": time_data.handle_start, @@ -910,8 +910,8 @@ def create_instances_for_cache(instance, skeleton): """ anatomy = instance.context.data["anatomy"] - subset = skeleton["subset"] - family = skeleton["family"] + product_name = skeleton["productName"] + product_type = skeleton["productType"] exp_files = instance.data["expectedFiles"] log = Logger.get_logger("farm_publishing") @@ -948,9 +948,9 @@ def create_instances_for_cache(instance, skeleton): new_instance = deepcopy(skeleton) - new_instance["subset"] = subset - log.info("Creating data for: {}".format(subset)) - new_instance["family"] = family + new_instance["productName"] = product_name + log.info("Creating data for: {}".format(product_name)) + new_instance["productType"] = product_type new_instance["families"] = skeleton["families"] # create representation if isinstance(col, (list, tuple)): @@ -984,7 +984,7 @@ def create_instances_for_cache(instance, skeleton): def copy_extend_frames(instance, representation): """Copy existing frames from latest version. - This will copy all existing frames from subset's latest version back + This will copy all existing frames from product's latest version back to render directory and rename them to what renderer is expecting. Arguments: @@ -1005,20 +1005,20 @@ def copy_extend_frames(instance, representation): project_name = instance.context.data["project"] anatomy = instance.context.data["anatomy"] # type: Anatomy - # get latest version of subset - # this will stop if subset wasn't published yet + # get latest version of product + # this will stop if product wasn't published yet version = get_last_version_by_subset_name( project_name, - instance.data.get("subset"), - asset_name=instance.data.get("asset") + instance.data.get("productName"), + asset_name=instance.data.get("folderPath") ) # get its files based on extension - subset_resources = get_resources( + product_resources = get_resources( project_name, version, representation.get("ext") ) - r_col, _ = clique.assemble(subset_resources) + r_col, _ = clique.assemble(product_resources) # if override remove all frames we are expecting to be rendered, # so we'll copy only those missing from current render @@ -1064,11 +1064,11 @@ def copy_extend_frames(instance, representation): log.info("Finished copying %i files" % len(resource_files)) -def attach_instances_to_subset(attach_to, instances): - """Attach instance to subset. +def attach_instances_to_product(attach_to, instances): + """Attach instance to product. 
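A standalone sketch of how per-AOV product names are assembled in `_create_instances_for_aov` (group name, optional camera, optional AOV), mirroring the branching in the hunk above; the sample names are made up.

```python
def aov_product_name(group_name, cam=None, aov=None):
    # Camera present: suffix with camera, and with AOV unless the AOV
    # already starts with the camera name.
    if cam:
        if not aov:
            return "{}_{}".format(group_name, cam)
        if not aov.startswith(cam):
            return "{}_{}_{}".format(group_name, cam, aov)
        return "{}_{}".format(group_name, aov)
    # No camera: suffix with AOV only when there is one.
    if aov:
        return "{}_{}".format(group_name, aov)
    return group_name

print(aov_product_name("renderLightingMain", cam="camMain", aov="beauty"))
# -> renderLightingMain_camMain_beauty
```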
- If we are attaching to other subsets, create copy of existing - instances, change data to match its subset and replace + If we are attaching to other products, create copy of existing + instances, change data to match its product and replace existing instances with modified data. Args: @@ -1084,7 +1084,8 @@ def attach_instances_to_subset(attach_to, instances): for i in instances: new_inst = copy.deepcopy(i) new_inst["version"] = attach_instance.get("version") - new_inst["subset"] = attach_instance.get("subset") + new_inst["productName"] = attach_instance.get("productName") + new_inst["productType"] = attach_instance.get("productType") new_inst["family"] = attach_instance.get("family") new_inst["append"] = True # don't set subsetGroup if we are attaching @@ -1108,7 +1109,7 @@ def create_metadata_path(instance, anatomy): # directory is not available log.warning("Path is unreachable: `{}`".format(output_dir)) - metadata_filename = "{}_metadata.json".format(ins_data["subset"]) + metadata_filename = "{}_metadata.json".format(ins_data["productName"]) metadata_path = os.path.join(output_dir, metadata_filename) diff --git a/client/ayon_core/pipeline/farm/pyblish_functions.pyi b/client/ayon_core/pipeline/farm/pyblish_functions.pyi index d9d46a63be..16c11aa480 100644 --- a/client/ayon_core/pipeline/farm/pyblish_functions.pyi +++ b/client/ayon_core/pipeline/farm/pyblish_functions.pyi @@ -16,9 +16,9 @@ class TimeData: ... def remap_source(source: str, anatomy: Anatomy): ... -def extend_frames(asset: str, subset: str, start: int, end: int) -> Tuple[int, int]: ... +def extend_frames(folder_path: str, product_name: str, start: int, end: int) -> Tuple[int, int]: ... def get_time_data_from_instance_or_context(instance: pyblish.api.Instance) -> TimeData: ... def get_transferable_representations(instance: pyblish.api.Instance) -> list: ... def create_skeleton_instance(instance: pyblish.api.Instance, families_transfer: list = ..., instance_transfer: dict = ...) -> dict: ... def create_instances_for_aov(instance: pyblish.api.Instance, skeleton: dict, aov_filter: dict) -> List[pyblish.api.Instance]: ... -def attach_instances_to_subset(attach_to: list, instances: list) -> list: ... +def attach_instances_to_product(attach_to: list, instances: list) -> list: ... diff --git a/client/ayon_core/pipeline/farm/tools.py b/client/ayon_core/pipeline/farm/tools.py index f3acac7a32..8ab3b87ff6 100644 --- a/client/ayon_core/pipeline/farm/tools.py +++ b/client/ayon_core/pipeline/farm/tools.py @@ -5,8 +5,9 @@ def get_published_workfile_instance(context): """Find workfile instance in context""" for i in context: is_workfile = ( - "workfile" in i.data.get("families", []) or - i.data["family"] == "workfile" + i.data["productType"] == "workfile" + or "workfile" in i.data.get("families", []) + ) if not is_workfile: continue diff --git a/client/ayon_core/pipeline/legacy_io.py b/client/ayon_core/pipeline/legacy_io.py index cd09da2917..d5b555845b 100644 --- a/client/ayon_core/pipeline/legacy_io.py +++ b/client/ayon_core/pipeline/legacy_io.py @@ -1,109 +1,36 @@ -"""Wrapper around interactions with the database""" - -import os -import sys import logging -import functools - -from . 
import schema - -module = sys.modules[__name__] +from ayon_core.pipeline import get_current_project_name Session = {} -_is_installed = False log = logging.getLogger(__name__) - -SESSION_CONTEXT_KEYS = ( - # Name of current Project - "AVALON_PROJECT", - # Name of current Asset - "AVALON_ASSET", - # Name of current task - "AVALON_TASK", - # Name of current app - "AVALON_APP", - # Path to working directory - "AVALON_WORKDIR", - # Optional path to scenes directory (see Work Files API) - "AVALON_SCENEDIR" +log.warning( + "DEPRECATION WARNING: 'legacy_io' is deprecated and will be removed in" + " future versions of ayon-core addon." + "\nReading from Session won't give you updated information and changing" + " values won't affect global state of a process." ) def session_data_from_environment(context_keys=False): - session_data = {} - if context_keys: - for key in SESSION_CONTEXT_KEYS: - value = os.environ.get(key) - session_data[key] = value or "" - else: - for key in SESSION_CONTEXT_KEYS: - session_data[key] = None - - for key, default_value in ( - # Name of Avalon in graphical user interfaces - # Use this to customise the visual appearance of Avalon - # to better integrate with your surrounding pipeline - ("AVALON_LABEL", "Avalon"), - - # Used during any connections to the outside world - ("AVALON_TIMEOUT", "1000"), - - # Name of database used in MongoDB - ("AVALON_DB", "avalon"), - ): - value = os.environ.get(key) or default_value - if value is not None: - session_data[key] = value - - return session_data + return {} def is_installed(): - return module._is_installed + return False def install(): - """Establish a persistent connection to the database""" - if is_installed(): - return - - session = session_data_from_environment(context_keys=True) - - session["schema"] = "openpype:session-4.0" - try: - schema.validate(session) - except schema.ValidationError as e: - # TODO(marcus): Make this mandatory - log.warning(e) - - Session.update(session) - - module._is_installed = True + pass def uninstall(): - """Close any connection to the database. - - Deprecated: - This function does nothing should be removed. 
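Since `legacy_io.Session` no longer reflects the live context, existing call sites should read the context through the pipeline helpers instead. A short migration sketch, assuming ayon-core is importable; `get_current_project_name` is the same function the stubbed module imports above.

```python
from ayon_core.pipeline import get_current_project_name

# before: project_name = legacy_io.Session["AVALON_PROJECT"]
project_name = get_current_project_name()
print(project_name)
```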
- """ - module._is_installed = False + pass -def requires_install(func): - @functools.wraps(func) - def decorated(*args, **kwargs): - if not is_installed(): - install() - return func(*args, **kwargs) - return decorated - - -@requires_install def active_project(*args, **kwargs): - return Session["AVALON_PROJECT"] + return get_current_project_name() def current_project(*args, **kwargs): - return Session.get("AVALON_PROJECT") + return get_current_project_name() diff --git a/client/ayon_core/pipeline/load/plugins.py b/client/ayon_core/pipeline/load/plugins.py index e13260d296..3b60e357af 100644 --- a/client/ayon_core/pipeline/load/plugins.py +++ b/client/ayon_core/pipeline/load/plugins.py @@ -1,11 +1,8 @@ import os import logging -from ayon_core.settings import get_system_settings, get_project_settings -from ayon_core.pipeline import ( - schema, - legacy_io, -) +from ayon_core.settings import get_project_settings +from ayon_core.pipeline import schema from ayon_core.pipeline.plugin_discover import ( discover, register_plugin, @@ -40,8 +37,8 @@ class LoaderPlugin(list): log.propagate = True @classmethod - def apply_settings(cls, project_settings, system_settings): - host_name = os.environ.get("AVALON_APP") + def apply_settings(cls, project_settings): + host_name = os.environ.get("AYON_HOST_NAME") plugin_type = "load" plugin_type_settings = ( project_settings @@ -50,7 +47,7 @@ class LoaderPlugin(list): ) global_type_settings = ( project_settings - .get("global", {}) + .get("core", {}) .get(plugin_type, {}) ) if not global_type_settings and not plugin_type_settings: @@ -195,13 +192,13 @@ class LoaderPlugin(list): raise NotImplementedError("Loader.load() must be " "implemented by subclass") - def update(self, container, representation): + def update(self, container, context): """Update `container` to `representation` - Arguments: + Args: container (avalon-core:container-1.0): Container to update, from `host.ls()`. - representation (dict): Update the container to this representation. + context (dict): Update the container to this representation. """ raise NotImplementedError("Loader.update() must be " @@ -228,7 +225,7 @@ class LoaderPlugin(list): Returns static (cls) options or could collect from 'contexts'. 
Args: - contexts (list): of repre or subset contexts + contexts (list): of repre or product contexts Returns: (list) """ @@ -249,7 +246,7 @@ class LoaderPlugin(list): class SubsetLoaderPlugin(LoaderPlugin): - """Load subset into host application + """Load product into host application Arguments: context (dict): avalon-core:context-1.0 name (str, optional): Use pre-defined name @@ -265,11 +262,10 @@ def discover_loader_plugins(project_name=None): plugins = discover(LoaderPlugin) if not project_name: project_name = get_current_project_name() - system_settings = get_system_settings() project_settings = get_project_settings(project_name) for plugin in plugins: try: - plugin.apply_settings(project_settings, system_settings) + plugin.apply_settings(project_settings) except Exception: log.warning( "Failed to apply settings to loader {}".format( diff --git a/client/ayon_core/pipeline/load/utils.py b/client/ayon_core/pipeline/load/utils.py index 056836d712..e2bc1c7a26 100644 --- a/client/ayon_core/pipeline/load/utils.py +++ b/client/ayon_core/pipeline/load/utils.py @@ -1,7 +1,6 @@ import os import platform import copy -import getpass import logging import inspect import collections @@ -11,7 +10,9 @@ from ayon_core.host import ILoadHost from ayon_core.client import ( get_project, get_assets, + get_asset_by_id, get_subsets, + get_subset_by_id, get_versions, get_version_by_id, get_last_version_by_subset_id, @@ -481,6 +482,8 @@ def update_container(container, version=-1): new_version = get_version_by_name( project_name, version, current_version["parent"], fields=["_id"] ) + subset_doc = get_subset_by_id(project_name, current_version["parent"]) + asset_doc = get_asset_by_id(project_name, subset_doc["parent"]) assert new_version is not None, "This is a bug" @@ -499,8 +502,19 @@ def update_container(container, version=-1): "Can't update container because loader '{}' was not found." 
.format(container.get("loader")) ) + project_doc = get_project(project_name) + context = { + "project": { + "name": project_doc["name"], + "code": project_doc["data"]["code"], + }, + "asset": asset_doc, + "subset": subset_doc, + "version": new_version, + "representation": new_representation, + } - return Loader().update(container, new_representation) + return Loader().update(container, context) def switch_container(container, representation, loader_plugin=None): @@ -549,7 +563,7 @@ def switch_container(container, representation, loader_plugin=None): loader = loader_plugin(new_context) - return loader.switch(container, new_representation) + return loader.switch(container, new_context) def get_representation_path_from_context(context): diff --git a/client/ayon_core/pipeline/project_folders.py b/client/ayon_core/pipeline/project_folders.py index ad205522a6..811a98ce4b 100644 --- a/client/ayon_core/pipeline/project_folders.py +++ b/client/ayon_core/pipeline/project_folders.py @@ -104,7 +104,7 @@ def _list_path_items(folder_structure): def get_project_basic_paths(project_name): project_settings = get_project_settings(project_name) folder_structure = ( - project_settings["global"]["project_folder_structure"] + project_settings["core"]["project_folder_structure"] ) if not folder_structure: return [] diff --git a/client/ayon_core/pipeline/publish/abstract_collect_render.py b/client/ayon_core/pipeline/publish/abstract_collect_render.py index 764532cadb..745632ca0a 100644 --- a/client/ayon_core/pipeline/publish/abstract_collect_render.py +++ b/client/ayon_core/pipeline/publish/abstract_collect_render.py @@ -28,10 +28,12 @@ class RenderInstance(object): time = attr.ib() # time of instance creation (get_formatted_current_time) source = attr.ib() # path to source scene file label = attr.ib() # label to show in GUI - subset = attr.ib() # subset name + family = attr.ib() # product type for pyblish filtering + productType = attr.ib() # product type + productName = attr.ib() # product name + folderPath = attr.ib() # folder path task = attr.ib() # task name - asset = attr.ib() # asset name - attachTo = attr.ib() # subset name to attach render to + attachTo = attr.ib() # product name to attach render to setMembers = attr.ib() # list of nodes/members producing render output publish = attr.ib() # bool, True to publish instance name = attr.ib() # instance name @@ -60,7 +62,7 @@ class RenderInstance(object): review = attr.ib(default=None) # False - explicitly skip review priority = attr.ib(default=50) # job priority on farm - family = attr.ib(default="renderlayer") + # family = attr.ib(default="renderlayer") families = attr.ib(default=["renderlayer"]) # list of families # True if should be rendered on farm, eg not integrate farm = attr.ib(default=False) @@ -153,13 +155,13 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): exp_files = self.get_expected_files(render_instance) assert exp_files, "no file names were generated, this is bug" - # if we want to attach render to subset, check if we have AOV's + # if we want to attach render to product, check if we have AOV's # in expectedFiles. 
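Loaders now receive the full context dict built by `update_container` instead of only the representation document. A sketch of a loader reacting to the new signature; the class is illustrative and not a real `LoaderPlugin` subclass, and the container/context values are placeholders shaped after the keys in the hunk above.

```python
class ExampleLoader:
    def update(self, container, context):
        # The context carries project, asset, subset, version and representation docs.
        representation = context["representation"]
        version = context["version"]
        print("Updating '{}' to version {} ({})".format(
            container.get("objectName"), version["name"], representation["name"]
        ))


ExampleLoader().update(
    container={"objectName": "hero_rig_01"},
    context={
        "project": {"name": "demo_project", "code": "demo"},
        "asset": {"name": "hero"},
        "subset": {"name": "rigMain"},
        "version": {"name": 3},
        "representation": {"name": "ma"},
    },
)
```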
If so, raise error as we cannot attach AOV - # (considered to be subset on its own) to another subset + # (considered to be product on its own) to another product if render_instance.attachTo: assert isinstance(exp_files, list), ( "attaching multiple AOVs or renderable cameras to " - "subset is not supported" + "product is not supported" ) frame_start_render = int(render_instance.frameStart) diff --git a/client/ayon_core/pipeline/publish/lib.py b/client/ayon_core/pipeline/publish/lib.py index 47f4be9e69..b4ed69b5d7 100644 --- a/client/ayon_core/pipeline/publish/lib.py +++ b/client/ayon_core/pipeline/publish/lib.py @@ -13,12 +13,8 @@ from ayon_core.lib import ( Logger, import_filepath, filter_profiles, - is_func_signature_supported, -) -from ayon_core.settings import ( - get_project_settings, - get_system_settings, ) +from ayon_core.settings import get_project_settings from ayon_core.pipeline import ( tempdir, Anatomy @@ -60,7 +56,7 @@ def get_template_name_profiles( return copy.deepcopy( project_settings - ["global"] + ["core"] ["tools"] ["publish"] ["template_name_profiles"] @@ -95,7 +91,7 @@ def get_hero_template_name_profiles( return copy.deepcopy( project_settings - ["global"] + ["core"] ["tools"] ["publish"] ["hero_template_name_profiles"] @@ -105,7 +101,7 @@ def get_hero_template_name_profiles( def get_publish_template_name( project_name, host_name, - family, + product_type, task_name, task_type, project_settings=None, @@ -123,7 +119,7 @@ def get_publish_template_name( Args: project_name (str): Name of project where to look for settings. host_name (str): Name of host integration. - family (str): Family for which should be found template. + product_type (str): Product type for which should be found template. task_name (str): Task name on which is instance working. task_type (str): Task type on which is instance working. project_settings (Dict[str, Any]): Prepared project settings. @@ -138,7 +134,7 @@ def get_publish_template_name( template = None filter_criteria = { "hosts": host_name, - "families": family, + "product_types": product_type, "task_names": task_name, "task_types": task_type, } @@ -383,7 +379,7 @@ def get_plugin_settings(plugin, project_settings, log, category=None): # TODO: change after all plugins are moved one level up if category_from_file in ("ayon_core", "openpype"): - category_from_file = "global" + category_from_file = "core" try: return ( @@ -421,8 +417,8 @@ def apply_plugin_settings_automatically(plugin, settings, logger=None): def filter_pyblish_plugins(plugins): """Pyblish plugin filter which applies AYON settings. - Apply OpenPype settings on discovered plugins. On plugin with implemented - class method 'def apply_settings(cls, project_settings, system_settings)' + Apply settings on discovered plugins. On plugin with implemented + class method 'def apply_settings(cls, project_settings)' is called the method. 
Default behavior looks for plugin name and current host name to look for @@ -437,10 +433,9 @@ def filter_pyblish_plugins(plugins): # - kept becau on farm is probably used host 'shell' which propably # affect how settings are applied there host_name = pyblish.api.current_host() - project_name = os.environ.get("AVALON_PROJECT") + project_name = os.environ.get("AYON_PROJECT_NAME") project_settings = get_project_settings(project_name) - system_settings = get_system_settings() # iterate over plugins for plugin in plugins[:]: @@ -452,19 +447,7 @@ def filter_pyblish_plugins(plugins): # - can be used to target settings from custom settings place # - skip default behavior when successful try: - # Support to pass only project settings - # - make sure that both settings are passed, when can be - # - that covers cases when *args are in method parameters - both_supported = is_func_signature_supported( - apply_settings_func, project_settings, system_settings - ) - project_supported = is_func_signature_supported( - apply_settings_func, project_settings - ) - if not both_supported and project_supported: - plugin.apply_settings(project_settings) - else: - plugin.apply_settings(project_settings, system_settings) + plugin.apply_settings(project_settings) except Exception: log.warning( @@ -485,26 +468,6 @@ def filter_pyblish_plugins(plugins): plugins.remove(plugin) -def remote_publish(log): - """Loops through all plugins, logs to console. Used for tests. - - Args: - log (Logger) - """ - - # Error exit as soon as any error occurs. - error_format = "Failed {plugin.__name__}: {error}\n{error.traceback}" - - for result in pyblish.util.publish_iter(): - if not result["error"]: - continue - - error_message = error_format.format(**result) - log.error(error_message) - # 'Fatal Error: ' is because of Deadline - raise RuntimeError("Fatal Error: {}".format(error_message)) - - def get_errored_instances_from_context(context, plugin=None): """Collect failed instances from pyblish context. @@ -721,19 +684,26 @@ def get_publish_repre_path(instance, repre, only_published=False): return None -def get_custom_staging_dir_info(project_name, host_name, family, task_name, - task_type, subset_name, - project_settings=None, - anatomy=None, log=None): +def get_custom_staging_dir_info( + project_name, + host_name, + product_type, + task_name, + task_type, + product_name, + project_settings=None, + anatomy=None, + log=None +): """Checks profiles if context should use special custom dir as staging. Args: project_name (str) host_name (str) - family (str) + product_type (str) task_name (str) task_type (str) - subset_name (str) + product_name (str) project_settings(Dict[str, Any]): Prepared project settings. 
anatomy (Dict[str, Any]) log (Logger) (optional) @@ -744,7 +714,7 @@ def get_custom_staging_dir_info(project_name, host_name, family, task_name, ValueError - if misconfigured template should be used """ settings = project_settings or get_project_settings(project_name) - custom_staging_dir_profiles = (settings["global"] + custom_staging_dir_profiles = (settings["core"] ["tools"] ["publish"] ["custom_staging_dir_profiles"]) @@ -756,10 +726,10 @@ def get_custom_staging_dir_info(project_name, host_name, family, task_name, filtering_criteria = { "hosts": host_name, - "families": family, + "families": product_type, "task_names": task_name, "task_types": task_type, - "subsets": subset_name + "subsets": product_name } profile = filter_profiles(custom_staging_dir_profiles, filtering_criteria, @@ -800,18 +770,18 @@ def _validate_transient_template(project_name, template_name, anatomy): def get_published_workfile_instance(context): """Find workfile instance in context""" for i in context: - is_workfile = ( - "workfile" in i.data.get("families", []) or - i.data["family"] == "workfile" - ) - if not is_workfile: - continue - # test if there is instance of workfile waiting # to be published. if not i.data.get("publish", True): continue + if not ( + i.data["productType"] == "workfile" + # QUESTION Is check in 'families' valid? + or "workfile" in i.data.get("families", []) + ): + continue + return i @@ -937,7 +907,7 @@ def get_publish_instance_label(instance): is used string conversion of instance object -> 'instance._name'. Todos: - Maybe 'subset' key could be used too. + Maybe 'productName' key could be used too. Args: instance (pyblish.api.Instance): Pyblish instance. @@ -956,8 +926,8 @@ def get_publish_instance_label(instance): def get_publish_instance_families(instance): """Get all families of the instance. - Look for families under 'family' and 'families' keys in instance data. - Value of 'family' is used as first family and then all other families + Look for families under 'productType' and 'families' keys in instance data. + Value of 'productType' is used as first family and then all other families in random order. 
Args: diff --git a/client/ayon_core/pipeline/schema/container-3.0.json b/client/ayon_core/pipeline/schema/container-3.0.json new file mode 100644 index 0000000000..c9227bab11 --- /dev/null +++ b/client/ayon_core/pipeline/schema/container-3.0.json @@ -0,0 +1,59 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "ayon:container-3.0", + "description": "A loaded asset", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "id", + "objectName", + "name", + "namespace", + "loader", + "representation" + ], + "properties": { + "schema": { + "description": "Schema identifier for payload", + "type": "string", + "enum": ["ayon:container-3.0"], + "example": "ayon:container-3.0" + }, + "id": { + "description": "Identifier for finding object in host", + "type": "string", + "enum": ["ayon.load.container"], + "example": "ayon.load.container" + }, + "objectName": { + "description": "Name of internal object, such as the objectSet in Maya.", + "type": "string", + "example": "Bruce_:rigDefault_CON" + }, + "loader": { + "description": "Name of loader plug-in used to produce this container", + "type": "string", + "example": "ModelLoader" + }, + "name": { + "description": "Internal object name of container in application", + "type": "string", + "example": "modelDefault_01" + }, + "namespace": { + "description": "Internal namespace of container in application", + "type": "string", + "example": "Bruce_" + }, + "representation": { + "description": "Unique id of representation in database", + "type": "string", + "example": "59523f355f8c1b5f6c5e8348" + } + } +} diff --git a/client/ayon_core/pipeline/template_data.py b/client/ayon_core/pipeline/template_data.py index a1b944a431..e9c57521d4 100644 --- a/client/ayon_core/pipeline/template_data.py +++ b/client/ayon_core/pipeline/template_data.py @@ -1,9 +1,9 @@ from ayon_core.client import get_project, get_asset_by_name -from ayon_core.settings import get_system_settings +from ayon_core.settings import get_studio_settings from ayon_core.lib.local_settings import get_ayon_username -def get_general_template_data(system_settings=None): +def get_general_template_data(settings=None): """General template data based on system settings or machine. Output contains formatting keys: @@ -12,17 +12,16 @@ def get_general_template_data(system_settings=None): - 'user' - User's name using 'get_ayon_username' Args: - system_settings (Dict[str, Any]): System settings. + settings (Dict[str, Any]): Studio or project settings. """ - if not system_settings: - system_settings = get_system_settings() - studio_name = system_settings["general"]["studio_name"] - studio_code = system_settings["general"]["studio_code"] + if not settings: + settings = get_studio_settings() + core_settings = settings["core"] return { "studio": { - "name": studio_name, - "code": studio_code + "name": core_settings["studio_name"], + "code": core_settings["studio_code"] }, "user": get_ayon_username() } @@ -155,7 +154,7 @@ def get_template_data( asset_doc=None, task_name=None, host_name=None, - system_settings=None + settings=None ): """Prepare data for templates filling from entered documents and info. @@ -175,14 +174,14 @@ def get_template_data( asset_doc (Dict[str, Any]): Mongo document of asset from MongoDB. task_name (Union[str, None]): Task name under passed asset. host_name (Union[str, None]): Used to fill '{app}' key. - system_settings (Union[Dict, None]): Prepared system settings. + settings (Union[Dict, None]): Prepared studio or project settings. 
They're queried if not passed (may be slower). Returns: Dict[str, Any]: Data prepared for filling workdir template. """ - template_data = get_general_template_data(system_settings) + template_data = get_general_template_data(settings) template_data.update(get_project_template_data(project_doc)) if asset_doc: template_data.update(get_asset_template_data( @@ -204,7 +203,7 @@ def get_template_data_with_names( asset_name=None, task_name=None, host_name=None, - system_settings=None + settings=None ): """Prepare data for templates filling from entered entity names and info. @@ -219,7 +218,7 @@ def get_template_data_with_names( task_name (Union[str, None]): Task name under passed asset. host_name (Union[str, None]):Used to fill '{app}' key. because workdir template may contain `{app}` key. - system_settings (Union[Dict, None]): Prepared system settings. + settings (Union[Dict, None]): Prepared studio or project settings. They're queried if not passed. Returns: @@ -237,5 +236,5 @@ def get_template_data_with_names( fields=["name", "data.parents", "data.tasks"] ) return get_template_data( - project_doc, asset_doc, task_name, host_name, system_settings + project_doc, asset_doc, task_name, host_name, settings ) diff --git a/client/ayon_core/lib/usdlib.py b/client/ayon_core/pipeline/usdlib.py similarity index 100% rename from client/ayon_core/lib/usdlib.py rename to client/ayon_core/pipeline/usdlib.py diff --git a/client/ayon_core/pipeline/version_start.py b/client/ayon_core/pipeline/version_start.py index bd7d800335..7ee20a5dd4 100644 --- a/client/ayon_core/pipeline/version_start.py +++ b/client/ayon_core/pipeline/version_start.py @@ -7,8 +7,8 @@ def get_versioning_start( host_name, task_name=None, task_type=None, - family=None, - subset=None, + product_type=None, + product_name=None, project_settings=None, ): """Get anatomy versioning start""" @@ -16,18 +16,20 @@ def get_versioning_start( project_settings = get_project_settings(project_name) version_start = 1 - settings = project_settings["global"] + settings = project_settings["core"] profiles = settings.get("version_start_category", {}).get("profiles", []) if not profiles: return version_start + # TODO use 'product_types' and 'product_name' instead of + # 'families' and 'subsets' filtering_criteria = { "host_names": host_name, - "families": family, + "families": product_type, "task_names": task_name, "task_types": task_type, - "subsets": subset + "subsets": product_name } profile = filter_profiles(profiles, filtering_criteria) diff --git a/client/ayon_core/pipeline/workfile/build_workfile.py b/client/ayon_core/pipeline/workfile/build_workfile.py index c62facaaa9..34d8ef0c8f 100644 --- a/client/ayon_core/pipeline/workfile/build_workfile.py +++ b/client/ayon_core/pipeline/workfile/build_workfile.py @@ -48,18 +48,18 @@ class BuildWorkfile: return self._log @staticmethod - def map_subsets_by_family(subsets): - subsets_by_family = collections.defaultdict(list) - for subset in subsets: - family = subset["data"].get("family") - if not family: - families = subset["data"].get("families") + def map_products_by_type(subset_docs): + products_by_type = collections.defaultdict(list) + for subset_doc in subset_docs: + product_type = subset_doc["data"].get("family") + if not product_type: + families = subset_doc["data"].get("families") if not families: continue - family = families[0] + product_type = families[0] - subsets_by_family[family].append(subset) - return subsets_by_family + products_by_type[product_type].append(subset_doc) + return products_by_type def 
process(self): """Main method of this wrapper. @@ -80,17 +80,17 @@ class BuildWorkfile: stored in Workfile profiles from presets. Profiles are set by host, filtered by current task name and used by families. - Each family can specify representation names and loaders for + Each product type can specify representation names and loaders for representations and first available and successful loaded representation is returned as container. At the end you'll get list of loaded containers per each asset. loaded_containers [{ - "asset_entity": , + "asset_doc": , "containers": [, , ...] }, { - "asset_entity": , + "asset_doc": , "containers": [, ...] }, { ... @@ -110,14 +110,14 @@ class BuildWorkfile: # Get current asset name and entity project_name = get_current_project_name() - current_asset_name = get_current_asset_name() - current_asset_entity = get_asset_by_name( - project_name, current_asset_name + current_folder_path = get_current_asset_name() + current_asset_doc = get_asset_by_name( + project_name, current_folder_path ) # Skip if asset was not found - if not current_asset_entity: - print("Asset entity with name `{}` was not found".format( - current_asset_name + if not current_asset_doc: + print("Folder entity `{}` was not found".format( + current_folder_path )) return loaded_containers @@ -143,7 +143,7 @@ class BuildWorkfile: # Load workfile presets for task self.build_presets = self.get_build_presets( - current_task_name, current_asset_entity + current_task_name, current_asset_doc ) # Skip if there are any presets for task @@ -155,9 +155,9 @@ class BuildWorkfile: ) return loaded_containers - # Get presets for loading current asset + # Get presets for loading current folder current_context_profiles = self.build_presets.get("current_context") - # Get presets for loading linked assets + # Get presets for loading linked folders link_context_profiles = self.build_presets.get("linked_assets") # Skip if both are missing if not current_context_profiles and not link_context_profiles: @@ -177,38 +177,38 @@ class BuildWorkfile: elif not link_context_profiles: self.log.warning(( "Current task `{}` doesn't have any" - "loading preset for it's linked assets." + " loading preset for its linked folders." ).format(current_task_name)) # Prepare assets to process by workfile presets - assets = [] - current_asset_id = None + asset_docs = [] + current_folder_id = None if current_context_profiles: # Add current asset entity if preset has current context set - assets.append(current_asset_entity) - current_asset_id = current_asset_entity["_id"] + asset_docs.append(current_asset_doc) + current_folder_id = current_asset_doc["_id"] if link_context_profiles: # Find and append linked assets if preset has set linked mapping - link_assets = get_linked_assets(project_name, current_asset_entity) + link_assets = get_linked_assets(project_name, current_asset_doc) if link_assets: - assets.extend(link_assets) + asset_docs.extend(link_assets) # Skip if there are no assets. This can happen if only linked mapping # is set and there are no links for his asset. - if not assets: + if not asset_docs: self.log.warning( "Asset does not have linked assets. Nothing to process."
) return loaded_containers # Prepare entities from database for assets - prepared_entities = self._collect_last_version_repres(assets) + prepared_entities = self._collect_last_version_repres(asset_docs) # Load containers by prepared entities and presets # - Current asset containers - if current_asset_id and current_asset_id in prepared_entities: - current_context_data = prepared_entities.pop(current_asset_id) + if current_folder_id and current_folder_id in prepared_entities: + current_context_data = prepared_entities.pop(current_folder_id) loaded_data = self.load_containers_by_asset_data( current_context_data, current_context_profiles, loaders_by_name ) @@ -229,8 +229,8 @@ class BuildWorkfile: def get_build_presets(self, task_name, asset_doc): """ Returns presets to build workfile for task name. - Presets are loaded for current project set in - io.Session["AVALON_PROJECT"], filtered by registered host + Presets are loaded for current project received by + 'get_current_project_name', filtered by registered host and entered task name. Args: @@ -281,7 +281,7 @@ class BuildWorkfile: with valid values. - "loaders" expects list of strings representing possible loaders. - "families" expects list of strings for filtering - by main subset family. + by product type. - "repre_names" expects list of strings for filtering by representation name. @@ -321,7 +321,7 @@ class BuildWorkfile: continue # Check families - profile_families = profile.get("families") + profile_families = profile.get("product_types") if not profile_families: self.log.warning(( "Build profile is missing families configuration: {0}" @@ -338,7 +338,7 @@ class BuildWorkfile: continue # Prepare lowered families and representation names - profile["families_lowered"] = [ + profile["product_types_lowered"] = [ fam.lower() for fam in profile_families ] profile["repre_names_lowered"] = [ @@ -349,37 +349,37 @@ class BuildWorkfile: return valid_profiles - def _prepare_profile_for_subsets(self, subsets, profiles): - """Select profile for each subset by it's data. + def _prepare_profile_for_products(self, subset_docs, profiles): + """Select profile for each product by its data. - Profiles are filtered for each subset individually. - Profile is filtered by subset's family, optionally by name regex and + Profiles are filtered for each product individually. + Profile is filtered by product type, optionally by name regex and representation names set in profile. - It is possible to not find matching profile for subset, in that case - subset is skipped and it is possible that none of subsets have + It is possible to not find matching profile for product, in that case + product is skipped and it is possible that none of products have matching profile. Args: - subsets (List[Dict[str, Any]]): Subset documents. + subset_docs (List[Dict[str, Any]]): Subset documents. profiles (List[Dict[str, Any]]): Build profiles. Returns: - Dict[str, Any]: Profile by subset's id. + Dict[str, Any]: Profile by product id.
""" - # Prepare subsets - subsets_by_family = self.map_subsets_by_family(subsets) + # Prepare products + products_by_type = self.map_products_by_type(subset_docs) - profiles_per_subset_id = {} - for family, subsets in subsets_by_family.items(): - family_low = family.lower() + profiles_by_product_id = {} + for product_type, subset_docs in products_by_type.items(): + product_type_low = product_type.lower() for profile in profiles: - # Skip profile if does not contain family - if family_low not in profile["families_lowered"]: + # Skip profile if does not contain product type + if product_type_low not in profile["product_types_lowered"]: continue # Precompile name filters as regexes - profile_regexes = profile.get("subset_name_filters") + profile_regexes = profile.get("product_name_filters") if profile_regexes: _profile_regexes = [] for regex in profile_regexes: @@ -387,31 +387,31 @@ class BuildWorkfile: profile_regexes = _profile_regexes # TODO prepare regex compilation - for subset in subsets: + for subset_doc in subset_docs: # Verify regex filtering (optional) if profile_regexes: valid = False for pattern in profile_regexes: - if re.match(pattern, subset["name"]): + if re.match(pattern, subset_doc["name"]): valid = True break if not valid: continue - profiles_per_subset_id[subset["_id"]] = profile + profiles_by_product_id[subset_doc["_id"]] = profile # break profiles loop on finding the first matching profile break - return profiles_per_subset_id + return profiles_by_product_id def load_containers_by_asset_data( - self, asset_entity_data, build_profiles, loaders_by_name + self, asset_doc_data, build_profiles, loaders_by_name ): """Load containers for entered asset entity by Build profiles. Args: - asset_entity_data (Dict[str, Any]): Prepared data with subsets, + asset_doc_data (Dict[str, Any]): Prepared data with products, last versions and representations for specific asset. build_profiles (Dict[str, Any]): Build profiles. 
loaders_by_name (Dict[str, LoaderPlugin]): Available loaders @@ -423,10 +423,10 @@ class BuildWorkfile: """ # Make sure all data are not empty - if not asset_entity_data or not build_profiles or not loaders_by_name: + if not asset_doc_data or not build_profiles or not loaders_by_name: return - asset_entity = asset_entity_data["asset_entity"] + asset_doc = asset_doc_data["asset_doc"] valid_profiles = self._filter_build_profiles( build_profiles, loaders_by_name @@ -439,53 +439,53 @@ class BuildWorkfile: self.log.debug("Valid Workfile profiles: {}".format(valid_profiles)) - subsets_by_id = {} - version_by_subset_id = {} + products_by_id = {} + version_by_product_id = {} repres_by_version_id = {} - for subset_id, in_data in asset_entity_data["subsets"].items(): - subset_entity = in_data["subset_entity"] - subsets_by_id[subset_entity["_id"]] = subset_entity + for product_id, in_data in asset_doc_data["subsets"].items(): + subset_doc = in_data["subset_doc"] + products_by_id[subset_doc["_id"]] = subset_doc version_data = in_data["version"] - version_entity = version_data["version_entity"] - version_by_subset_id[subset_id] = version_entity - repres_by_version_id[version_entity["_id"]] = ( + version_doc = version_data["version_doc"] + version_by_product_id[product_id] = version_doc + repres_by_version_id[version_doc["_id"]] = ( version_data["repres"] ) - if not subsets_by_id: - self.log.warning("There are not subsets for asset {0}".format( - asset_entity["name"] + if not products_by_id: + self.log.warning("There are no products for folder {0}".format( + asset_doc["name"] )) return - profiles_per_subset_id = self._prepare_profile_for_subsets( - subsets_by_id.values(), valid_profiles + profiles_by_product_id = self._prepare_profile_for_products( + products_by_id.values(), valid_profiles ) - if not profiles_per_subset_id: - self.log.warning("There are not valid subsets.") + if not profiles_by_product_id: + self.log.warning("There are no valid products.") return - valid_repres_by_subset_id = collections.defaultdict(list) - for subset_id, profile in profiles_per_subset_id.items(): + valid_repres_by_product_id = collections.defaultdict(list) + for product_id, profile in profiles_by_product_id.items(): profile_repre_names = profile["repre_names_lowered"] - version_entity = version_by_subset_id[subset_id] - version_id = version_entity["_id"] + version_doc = version_by_product_id[product_id] + version_id = version_doc["_id"] repres = repres_by_version_id[version_id] for repre in repres: repre_name_low = repre["name"].lower() if repre_name_low in profile_repre_names: - valid_repres_by_subset_id[subset_id].append(repre) + valid_repres_by_product_id[product_id].append(repre) # DEBUG message - msg = "Valid representations for Asset: `{}`".format( - asset_entity["name"] + msg = "Valid representations for Folder: `{}`".format( + asset_doc["name"] ) - for subset_id, repres in valid_repres_by_subset_id.items(): - subset = subsets_by_id[subset_id] - msg += "\n# Subset Name/ID: `{}`/{}".format( - subset["name"], subset_id + for product_id, repres in valid_repres_by_product_id.items(): + subset_doc = products_by_id[product_id] + msg += "\n# Product Name/ID: `{}`/{}".format( + subset_doc["name"], product_id ) for repre in repres: msg += "\n## Repre name: `{}`".format(repre["name"]) @@ -493,37 +493,37 @@ class BuildWorkfile: self.log.debug(msg) containers = self._load_containers( - valid_repres_by_subset_id, subsets_by_id, - profiles_per_subset_id, loaders_by_name + valid_repres_by_product_id, products_by_id, +
profiles_by_product_id, loaders_by_name ) return { - "asset_entity": asset_entity, + "asset_doc": asset_doc, "containers": containers } def _load_containers( - self, repres_by_subset_id, subsets_by_id, - profiles_per_subset_id, loaders_by_name + self, repres_by_product_id, products_by_id, + profiles_by_product_id, loaders_by_name ): """Real load by collected data happens here. - Loading of representations per subset happens here. Each subset can + Loading of representations per product happens here. Each product loads one representation. Loading is tried in specific order. Representations are tried to load by names defined in configuration. - If subset has representation matching representation name each loader + If product has representation matching representation name each loader is tried to load it until any is successful. If none of them was successful then next representation name is tried. Subset process loop ends when any representation is loaded or all matching representations were already tried. Args: - repres_by_subset_id (Dict[str, Dict[str, Any]]): Available - representations mapped by their parent (subset) id. - subsets_by_id (Dict[str, Dict[str, Any]]): Subset documents + repres_by_product_id (Dict[str, Dict[str, Any]]): Available + representations mapped by their parent (product) id. + products_by_id (Dict[str, Dict[str, Any]]): Subset documents mapped by their id. - profiles_per_subset_id (Dict[str, Dict[str, Any]]): Build profiles - mapped by subset id. + profiles_by_product_id (Dict[str, Dict[str, Any]]): Build profiles + mapped by product id. loaders_by_name (Dict[str, LoaderPlugin]): Available loaders per name. @@ -533,38 +533,40 @@ class BuildWorkfile: loaded_containers = [] - # Get subset id order from build presets. + # Get product id order from build presets. build_presets = self.build_presets.get("current_context", []) build_presets += self.build_presets.get("linked_assets", []) - subset_ids_ordered = [] + product_ids_ordered = [] for preset in build_presets: - for preset_family in preset["families"]: - for id, subset in subsets_by_id.items(): - if preset_family not in subset["data"].get("families", []): + for product_type in preset["product_types"]: + for product_id, subset_doc in products_by_id.items(): + # TODO 'families' is not available on product + families = subset_doc["data"].get("families") or [] + if product_type not in families: continue - subset_ids_ordered.append(id) + product_ids_ordered.append(product_id) - # Order representations from subsets. - print("repres_by_subset_id", repres_by_subset_id) + # Order representations from products. + print("repres_by_product_id", repres_by_product_id) representations_ordered = [] representations = [] - for id in subset_ids_ordered: - for subset_id, repres in repres_by_subset_id.items(): + for ordered_product_id in product_ids_ordered: + for product_id, repres in repres_by_product_id.items(): if repres in representations: continue - if id == subset_id: - representations_ordered.append((subset_id, repres)) + if ordered_product_id == product_id: + representations_ordered.append((product_id, repres)) representations.append(repres) print("representations", representations) # Load ordered representations.
- for subset_id, repres in representations_ordered: - subset_name = subsets_by_id[subset_id]["name"] + for product_id, repres in representations_ordered: + product_name = products_by_id[product_id]["name"] - profile = profiles_per_subset_id[subset_id] + profile = profiles_by_product_id[product_id] loaders_last_idx = len(profile["loaders"]) - 1 repre_names_last_idx = len(profile["repre_names_lowered"]) - 1 @@ -595,7 +597,7 @@ class BuildWorkfile: container = load_container( loader, repre["_id"], - name=subset_name + name=product_name ) loaded_containers.append(container) is_loaded = True @@ -618,8 +620,8 @@ class BuildWorkfile: msg += " Trying next loader." elif repre_name_idx < repre_names_last_idx: msg += ( - " Loading of subset `{}` was not successful." - ).format(subset_name) + " Loading of product `{}` was not successful." + ).format(product_name) else: msg += " Trying next representation." self.log.info(msg) @@ -627,7 +629,7 @@ class BuildWorkfile: return loaded_containers def _collect_last_version_repres(self, asset_docs): - """Collect subsets, versions and representations for asset_entities. + """Collect products, versions and representations for asset_entities. Args: asset_docs (List[Dict[str, Any]]): Asset entities for which @@ -640,12 +642,12 @@ class BuildWorkfile: ``` { {Asset ID}: { - "asset_entity": , + "asset_doc": , "subsets": { {Subset ID}: { - "subset_entity": , + "subset_doc": , "version": { - "version_entity": , + "version_doc": , "repres": [ , , ... ] @@ -656,7 +658,7 @@ class BuildWorkfile: }, ... } - output[asset_id]["subsets"][subset_id]["version"]["repres"] + output[folder_id]["subsets"][product_id]["version"]["repres"] ``` """ @@ -666,20 +668,26 @@ class BuildWorkfile: if not asset_docs: return output - asset_docs_by_ids = {asset["_id"]: asset for asset in asset_docs} + asset_docs_by_ids = { + asset_doc["_id"]: asset_doc + for asset_doc in asset_docs + } project_name = get_current_project_name() - subsets = list(get_subsets( + subset_docs = list(get_subsets( project_name, asset_ids=asset_docs_by_ids.keys() )) - subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} + subset_docs_by_id = { + subset_doc["_id"]: subset_doc + for subset_doc in subset_docs + } - last_version_by_subset_id = get_last_versions( - project_name, subset_entity_by_ids.keys() + last_version_by_product_id = get_last_versions( + project_name, subset_docs_by_id.keys() ) last_version_docs_by_id = { version["_id"]: version - for version in last_version_by_subset_id.values() + for version in last_version_by_product_id.values() } repre_docs = get_representations( project_name, version_ids=last_version_docs_by_id.keys() @@ -689,28 +697,28 @@ class BuildWorkfile: version_id = repre_doc["parent"] version_doc = last_version_docs_by_id[version_id] - subset_id = version_doc["parent"] - subset_doc = subset_entity_by_ids[subset_id] + product_id = version_doc["parent"] + subset_doc = subset_docs_by_id[product_id] - asset_id = subset_doc["parent"] - asset_doc = asset_docs_by_ids[asset_id] + folder_id = subset_doc["parent"] + asset_doc = asset_docs_by_ids[folder_id] - if asset_id not in output: - output[asset_id] = { - "asset_entity": asset_doc, + if folder_id not in output: + output[folder_id] = { + "asset_doc": asset_doc, "subsets": {} } - if subset_id not in output[asset_id]["subsets"]: - output[asset_id]["subsets"][subset_id] = { - "subset_entity": subset_doc, + if product_id not in output[folder_id]["subsets"]: + output[folder_id]["subsets"][product_id] = { + "subset_doc": subset_doc, 
"version": { - "version_entity": version_doc, + "version_doc": version_doc, "repres": [] } } - output[asset_id]["subsets"][subset_id]["version"]["repres"].append( + output[folder_id]["subsets"][product_id]["version"]["repres"].append( repre_doc ) diff --git a/client/ayon_core/pipeline/workfile/lock_workfile.py b/client/ayon_core/pipeline/workfile/lock_workfile.py index a6d4348966..7eab3f36dc 100644 --- a/client/ayon_core/pipeline/workfile/lock_workfile.py +++ b/client/ayon_core/pipeline/workfile/lock_workfile.py @@ -64,7 +64,7 @@ def is_workfile_lock_enabled(host_name, project_name, project_setting=None): project_setting = get_project_settings(project_name) workfile_lock_profiles = ( project_setting - ["global"] + ["core"] ["tools"] ["Workfiles"] ["workfile_lock_profiles"]) diff --git a/client/ayon_core/pipeline/workfile/path_resolving.py b/client/ayon_core/pipeline/workfile/path_resolving.py index 95a0a03c60..7718e32317 100644 --- a/client/ayon_core/pipeline/workfile/path_resolving.py +++ b/client/ayon_core/pipeline/workfile/path_resolving.py @@ -72,7 +72,7 @@ def get_workfile_template_key( try: profiles = ( project_settings - ["global"] + ["core"] ["tools"] ["Workfiles"] ["workfile_template_profiles"] @@ -157,7 +157,7 @@ def get_workdir( task_name (str): Task name for which are workdir data preapred. host_name (str): Host which is used to workdir. This is required because workdir template may contain `{app}` key. In `Session` - is stored under `AVALON_APP` key. + is stored under `AYON_HOST_NAME` key. anatomy (Anatomy): Optional argument. Anatomy object is created using project name from `project_doc`. It is preferred to pass this argument as initialization of a new Anatomy object may be time @@ -321,7 +321,7 @@ def get_last_workfile( data["app"], task_name=data["task"]["name"], task_type=data["task"]["type"], - family="workfile" + product_type="workfile" ) data.pop("comment", None) if not data.get("ext"): @@ -507,7 +507,7 @@ def create_workdir_extra_folders( # Load extra folders profiles extra_folders_profiles = ( - project_settings["global"]["tools"]["Workfiles"]["extra_folders"] + project_settings["core"]["tools"]["Workfiles"]["extra_folders"] ) # Skip if are empty if not extra_folders_profiles: diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 1afe26813f..c889e0cafb 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -27,10 +27,7 @@ from ayon_core.client import ( get_representations, get_ayon_server_api_connection, ) -from ayon_core.settings import ( - get_project_settings, - get_system_settings, -) +from ayon_core.settings import get_project_settings from ayon_core.host import IWorkfileHost, HostBase from ayon_core.lib import ( Logger, @@ -103,7 +100,7 @@ class AbstractTemplateBuilder(object): if isinstance(host, HostBase): host_name = host.name else: - host_name = os.environ.get("AVALON_APP") + host_name = os.environ.get("AYON_HOST_NAME") self._host = host self._host_name = host_name @@ -118,7 +115,6 @@ class AbstractTemplateBuilder(object): self._creators_by_name = None self._create_context = None - self._system_settings = None self._project_settings = None self._current_asset_doc = None @@ -129,35 +125,29 @@ class AbstractTemplateBuilder(object): def project_name(self): if isinstance(self._host, HostBase): return self._host.get_current_project_name() - return os.getenv("AVALON_PROJECT") 
+ return os.getenv("AYON_PROJECT_NAME") @property def current_asset_name(self): if isinstance(self._host, HostBase): return self._host.get_current_asset_name() - return os.getenv("AVALON_ASSET") + return os.getenv("AYON_FOLDER_PATH") @property def current_task_name(self): if isinstance(self._host, HostBase): return self._host.get_current_task_name() - return os.getenv("AVALON_TASK") + return os.getenv("AYON_TASK_NAME") def get_current_context(self): if isinstance(self._host, HostBase): return self._host.get_current_context() return { "project_name": self.project_name, - "asset_name": self.current_asset_name, + "folder_path": self.current_asset_name, "task_name": self.current_task_name } - @property - def system_settings(self): - if self._system_settings is None: - self._system_settings = get_system_settings() - return self._system_settings - @property def project_settings(self): if self._project_settings is None: @@ -256,7 +246,6 @@ class AbstractTemplateBuilder(object): self._linked_asset_docs = None self._task_type = None - self._system_settings = None self._project_settings = None self.clear_shared_data() @@ -553,6 +542,12 @@ class AbstractTemplateBuilder(object): self.clear_shared_populate_data() + def open_template(self): + """Open template file with registered host.""" + template_preset = self.get_template_preset() + template_path = template_preset["path"] + self.host.open_file(template_path) + @abstractmethod def import_template(self, template_path): """ @@ -579,7 +574,7 @@ class AbstractTemplateBuilder(object): template_path (str): Fullpath for current task and host's template file. """ - last_workfile_path = os.environ.get("AVALON_LAST_WORKFILE") + last_workfile_path = os.environ.get("AYON_LAST_WORKFILE") self.log.info("__ last_workfile_path: {}".format(last_workfile_path)) if os.path.exists(last_workfile_path): # ignore in case workfile existence @@ -1293,6 +1288,10 @@ class PlaceholderLoadMixin(object): " used." ) + product_type = options.get("product_type") + if product_type is None: + product_type = options.get("family") + return [ attribute_definitions.UISeparatorDef(), attribute_definitions.UILabelDef("Main attributes"), @@ -1306,9 +1305,9 @@ class PlaceholderLoadMixin(object): tooltip=build_type_help ), attribute_definitions.EnumDef( - "family", - label="Family", - default=options.get("family"), + "product_type", + label="Product type", + default=product_type, items=families ), attribute_definitions.TextDef( @@ -1461,7 +1460,7 @@ class PlaceholderLoadMixin(object): Note: This returns all representation documents from all versions of - matching subset. To filter for last version use + matching product. To filter for last version use '_reduce_last_version_repre_docs'. 
Args: @@ -1552,22 +1551,22 @@ class PlaceholderLoadMixin(object): repre_context = repre_doc["context"] asset_name = repre_context["asset"] - subset_name = repre_context["subset"] + product_name = repre_context["subset"] version = repre_context.get("version", -1) if asset_name not in mapping: mapping[asset_name] = {} - subset_mapping = mapping[asset_name] - if subset_name not in subset_mapping: - subset_mapping[subset_name] = collections.defaultdict(list) + product_mapping = mapping[asset_name] + if product_name not in product_mapping: + product_mapping[product_name] = collections.defaultdict(list) - version_mapping = subset_mapping[subset_name] + version_mapping = product_mapping[product_name] version_mapping[version].append(repre_doc) output = [] - for subset_mapping in mapping.values(): - for version_mapping in subset_mapping.values(): + for product_mapping in mapping.values(): + for version_mapping in product_mapping.values(): last_version = tuple(sorted(version_mapping.keys()))[-1] output.extend(version_mapping[last_version]) return output @@ -1578,8 +1577,8 @@ class PlaceholderLoadMixin(object): Note: Ignore repre ids is to avoid loading the same representation again on load. But the representation can be loaded with different loader - and there could be published new version of matching subset for the - representation. We should maybe expect containers. + and there could be published new version of matching product for + the representation. We should maybe expect containers. Also import loaders don't have containers at all... @@ -1751,7 +1750,7 @@ class PlaceholderCreateMixin(object): tooltip=( "Creator" "\nDefines variant name which will be use for " - "\ncompiling of subset name." + "\ncompiling of product name." ) ), attribute_definitions.UISeparatorDef(), @@ -1786,10 +1785,10 @@ class PlaceholderCreateMixin(object): creator_plugin = self.builder.get_creators_by_name()[creator_name] - # create subset name + # create product name context = self._builder.get_current_context() project_name = context["project_name"] - asset_name = context["asset_name"] + asset_name = context["folder_path"] task_name = context["task_name"] if legacy_create: @@ -1797,38 +1796,38 @@ class PlaceholderCreateMixin(object): project_name, asset_name, fields=["_id"] ) assert asset_doc, "No current asset found in Session" - subset_name = creator_plugin.get_subset_name( - create_variant, - task_name, + product_name = creator_plugin.get_product_name( + project_name, asset_doc["_id"], - project_name + task_name, + create_variant, ) else: asset_doc = get_asset_by_name(project_name, asset_name) assert asset_doc, "No current asset found in Session" - subset_name = creator_plugin.get_subset_name( - create_variant, - task_name, - asset_doc, + product_name = creator_plugin.get_product_name( project_name, + asset_doc, + task_name, + create_variant, self.builder.host_name ) creator_data = { "creator_name": creator_name, "create_variant": create_variant, - "subset_name": subset_name, + "product_name": product_name, "creator_plugin": creator_plugin } self._before_instance_create(placeholder) - # compile subset name from variant + # compile product name from variant try: if legacy_create: creator_instance = creator_plugin( - subset_name, + product_name, asset_name ).process() else: diff --git a/client/ayon_core/plugins/actions/open_file_explorer.py b/client/ayon_core/plugins/actions/open_file_explorer.py index b29ed30258..fba3c231a5 100644 --- a/client/ayon_core/plugins/actions/open_file_explorer.py +++ 
b/client/ayon_core/plugins/actions/open_file_explorer.py @@ -22,14 +22,14 @@ class OpenTaskPath(LauncherAction): def is_compatible(self, session): """Return whether the action is compatible with the session""" - return bool(session.get("AVALON_ASSET")) + return bool(session.get("AYON_FOLDER_PATH")) def process(self, session, **kwargs): from qtpy import QtCore, QtWidgets - project_name = session["AVALON_PROJECT"] - asset_name = session["AVALON_ASSET"] - task_name = session.get("AVALON_TASK", None) + project_name = session["AYON_PROJECT_NAME"] + asset_name = session["AYON_FOLDER_PATH"] + task_name = session.get("AYON_TASK_NAME", None) path = self._get_workdir(project_name, asset_name, task_name) if not path: diff --git a/client/ayon_core/plugins/load/delete_old_versions.py b/client/ayon_core/plugins/load/delete_old_versions.py index 6b3263e2b6..4fc61ebb8b 100644 --- a/client/ayon_core/plugins/load/delete_old_versions.py +++ b/client/ayon_core/plugins/load/delete_old_versions.py @@ -359,7 +359,7 @@ # # if mongo_changes_bulk: # dbcon = AvalonMongoDB() -# dbcon.Session["AVALON_PROJECT"] = project_name +# dbcon.Session["AYON_PROJECT_NAME"] = project_name # dbcon.install() # dbcon.bulk_write(mongo_changes_bulk) # dbcon.uninstall() @@ -401,7 +401,7 @@ # import ftrack_api # # session = ftrack_api.Session() -# subset_name = data["subset"]["name"] +# product_name = data["subset"]["name"] # versions = { # '"{}"'.format(version_doc["name"]) # for version_doc in data["versions"] @@ -414,7 +414,7 @@ # " and version in ({})" # ).format( # asset_ftrack_id, -# subset_name, +# product_name, # ",".join(versions) # ) # ).all() diff --git a/client/ayon_core/plugins/publish/cleanup.py b/client/ayon_core/plugins/publish/cleanup.py index 7bed3269c2..57ef803352 100644 --- a/client/ayon_core/plugins/publish/cleanup.py +++ b/client/ayon_core/plugins/publish/cleanup.py @@ -5,7 +5,7 @@ import shutil import pyblish.api import re -from ayon_core.tests.lib import is_in_tests +from ayon_core.lib import is_in_tests class CleanUp(pyblish.api.InstancePlugin): @@ -40,7 +40,7 @@ class CleanUp(pyblish.api.InstancePlugin): active = True # Presets - paterns = None # list of regex paterns + patterns = None # list of regex patterns remove_temp_renders = True def process(self, instance): @@ -72,7 +72,7 @@ class CleanUp(pyblish.api.InstancePlugin): self.clean_renders(instance, skip_cleanup_filepaths) if [ef for ef in self.exclude_families - if instance.data["family"] in ef]: + if instance.data["productType"] in ef]: return import tempfile @@ -105,8 +105,8 @@ class CleanUp(pyblish.api.InstancePlugin): def clean_renders(self, instance, skip_cleanup_filepaths): transfers = instance.data.get("transfers", list()) - current_families = instance.data.get("families", list()) - instance_family = instance.data.get("family", None) + instance_families = instance.data.get("families", list()) + instance_product_type = instance.data.get("productType") dirnames = [] transfers_dirs = [] @@ -115,10 +115,10 @@ class CleanUp(pyblish.api.InstancePlugin): src = os.path.normpath(src) dest = os.path.normpath(dest) - # add src dir into clearing dir paths (regex paterns) + # add src dir into clearing dir paths (regex patterns) transfers_dirs.append(os.path.dirname(src)) - # add dest dir into clearing dir paths (regex paterns) + # add dest dir into clearing dir paths (regex patterns) transfers_dirs.append(os.path.dirname(dest)) if src in skip_cleanup_filepaths: @@ -127,27 +127,32 @@ class CleanUp(pyblish.api.InstancePlugin): ).format(src)) continue - if 
os.path.normpath(src) != os.path.normpath(dest): - if instance_family == 'render' or 'render' in current_families: - self.log.info("Removing src: `{}`...".format(src)) - try: - os.remove(src) - except PermissionError: - self.log.warning( - "Insufficient permission to delete {}".format(src) - ) - continue + if os.path.normpath(src) == os.path.normpath(dest): + continue - # add dir for cleanup - dirnames.append(os.path.dirname(src)) + if ( + instance_product_type == "render" + or "render" in instance_families + ): + self.log.info("Removing src: `{}`...".format(src)) + try: + os.remove(src) + except PermissionError: + self.log.warning( + "Insufficient permission to delete {}".format(src) + ) + continue - # clean by regex paterns + # add dir for cleanup + dirnames.append(os.path.dirname(src)) + + # clean by regex patterns # make unique set transfers_dirs = set(transfers_dirs) self.log.debug("__ transfers_dirs: `{}`".format(transfers_dirs)) - self.log.debug("__ self.paterns: `{}`".format(self.paterns)) - if self.paterns: + self.log.debug("__ self.patterns: `{}`".format(self.patterns)) + if self.patterns: files = list() # get list of all available content of dirs for _dir in transfers_dirs: @@ -159,14 +164,14 @@ class CleanUp(pyblish.api.InstancePlugin): self.log.debug("__ files: `{}`".format(files)) - # remove all files which match regex patern + # remove all files which match regex pattern for f in files: if os.path.normpath(f) in skip_cleanup_filepaths: continue - for p in self.paterns: - patern = re.compile(p) - if not patern.findall(f): + for p in self.patterns: + pattern = re.compile(p) + if not pattern.findall(f): continue if not os.path.exists(f): continue diff --git a/client/ayon_core/plugins/publish/collect_anatomy_context_data.py b/client/ayon_core/plugins/publish/collect_anatomy_context_data.py index 978ae5e1e1..b5bb579498 100644 --- a/client/ayon_core/plugins/publish/collect_anatomy_context_data.py +++ b/client/ayon_core/plugins/publish/collect_anatomy_context_data.py @@ -4,9 +4,9 @@ Requires: context -> anatomy context -> projectEntity context -> assetEntity + context -> task context -> username context -> datetimeData - session -> AVALON_TASK Provides: context -> anatomyData @@ -47,7 +47,7 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): def process(self, context): host_name = context.data["hostName"] - system_settings = context.data["system_settings"] + project_settings = context.data["project_settings"] project_entity = context.data["projectEntity"] asset_entity = context.data.get("assetEntity") task_name = None @@ -55,7 +55,11 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): task_name = context.data["task"] anatomy_data = get_template_data( - project_entity, asset_entity, task_name, host_name, system_settings + project_entity, + asset_entity, + task_name, + host_name, + project_settings ) anatomy_data.update(context.data.get("datetimeData") or {}) diff --git a/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py b/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py index 336ac02b8e..b62935dd6a 100644 --- a/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py +++ b/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py @@ -3,9 +3,9 @@ Requires: context -> anatomyData context -> projectEntity context -> assetEntity - instance -> asset - instance -> subset - instance -> family + instance -> folderPath + instance -> productName + instance -> productType Optional: instance -> version @@ -68,7 +68,7 @@ 
class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): instances_with_missing_asset_doc = collections.defaultdict(list) for instance in context: instance_asset_doc = instance.data.get("assetEntity") - _asset_name = instance.data["asset"] + _asset_name = instance.data["folderPath"] # There is possibility that assetEntity on instance is already set # which can happen in standalone publisher @@ -120,7 +120,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): ).format(joined_asset_names)) def fill_latest_versions(self, context, project_name): - """Try to find latest version for each instance's subset. + """Try to find latest version for each instance's product name. Key "latestVersion" is always set to latest version or `None`. @@ -134,7 +134,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): self.log.debug("Querying latest versions for instances.") hierarchy = {} - names_by_asset_ids = collections.defaultdict(set) + names_by_folder_ids = collections.defaultdict(set) for instance in context: # Make sure `"latestVersion"` key is set latest_version = instance.data.get("latestVersion") @@ -145,41 +145,41 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): if not asset_doc: continue - # Store asset ids and subset names for queries - asset_id = asset_doc["_id"] - subset_name = instance.data["subset"] + # Store folder ids and product names for queries + folder_id = asset_doc["_id"] + product_name = instance.data["productName"] # Prepare instance hierarchy for faster filling latest versions - if asset_id not in hierarchy: - hierarchy[asset_id] = {} - if subset_name not in hierarchy[asset_id]: - hierarchy[asset_id][subset_name] = [] - hierarchy[asset_id][subset_name].append(instance) - names_by_asset_ids[asset_id].add(subset_name) + if folder_id not in hierarchy: + hierarchy[folder_id] = {} + if product_name not in hierarchy[folder_id]: + hierarchy[folder_id][product_name] = [] + hierarchy[folder_id][product_name].append(instance) + names_by_folder_ids[folder_id].add(product_name) subset_docs = [] - if names_by_asset_ids: + if names_by_folder_ids: subset_docs = list(get_subsets( - project_name, names_by_asset_ids=names_by_asset_ids + project_name, names_by_asset_ids=names_by_folder_ids )) - subset_ids = [ + product_ids = { subset_doc["_id"] for subset_doc in subset_docs - ] + } - last_version_docs_by_subset_id = get_last_versions( - project_name, subset_ids, fields=["name"] + last_version_docs_by_product_id = get_last_versions( + project_name, product_ids, fields=["name"] ) for subset_doc in subset_docs: - subset_id = subset_doc["_id"] - last_version_doc = last_version_docs_by_subset_id.get(subset_id) + product_id = subset_doc["_id"] + last_version_doc = last_version_docs_by_product_id.get(product_id) if last_version_doc is None: continue - asset_id = subset_doc["parent"] - subset_name = subset_doc["name"] - _instances = hierarchy[asset_id][subset_name] + folder_id = subset_doc["parent"] + product_name = subset_doc["name"] + _instances = hierarchy[folder_id][product_name] for _instance in _instances: _instance.data["latestVersion"] = last_version_doc["name"] @@ -191,9 +191,15 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): for instance in context: anatomy_data = copy.deepcopy(context.data["anatomyData"]) + product_name = instance.data["productName"] + product_type = instance.data["productType"] anatomy_data.update({ - "family": instance.data["family"], - "subset": instance.data["subset"], + "family": product_type, + "subset": product_name, 
+ "product": { + "name": product_name, + "type": product_type, + } }) self._fill_asset_data(instance, project_doc, anatomy_data) @@ -227,8 +233,8 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): instance.context.data["hostName"], task_name=task_name, task_type=task_type, - family=instance.data["family"], - subset=instance.data["subset"] + product_type=instance.data["productType"], + product_name=instance.data["productName"] ) anatomy_data["version"] = version_number @@ -296,7 +302,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): if hierarchy: parent_name = hierarchy.split("/")[-1] - asset_name = instance.data["asset"].split("/")[-1] + asset_name = instance.data["folderPath"].split("/")[-1] anatomy_data.update({ "asset": asset_name, "hierarchy": hierarchy, @@ -337,7 +343,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): # Try to find task data based on hierarchy context and asset name hierarchy_context = instance.context.data.get("hierarchyContext") - asset_name = instance.data.get("asset") + asset_name = instance.data.get("folderPath") if not hierarchy_context or not asset_name: return diff --git a/client/ayon_core/plugins/publish/collect_audio.py b/client/ayon_core/plugins/publish/collect_audio.py index 94477e5578..58419e7284 100644 --- a/client/ayon_core/plugins/publish/collect_audio.py +++ b/client/ayon_core/plugins/publish/collect_audio.py @@ -14,7 +14,7 @@ from ayon_core.pipeline.load import get_representation_path_with_anatomy class CollectAudio(pyblish.api.ContextPlugin): """Collect asset's last published audio. - The audio subset name searched for is defined in: + The audio product name searched for is defined in: project settings > Collect Audio Note: @@ -40,10 +40,13 @@ class CollectAudio(pyblish.api.ContextPlugin): "webpublisher", "aftereffects", "flame", - "unreal" + "unreal", + "blender", + "houdini", + "max", ] - audio_subset_name = "audioMain" + audio_product_name = "audioMain" def process(self, context): # Fake filtering by family inside context plugin @@ -66,14 +69,14 @@ class CollectAudio(pyblish.api.ContextPlugin): # Add audio to instance if exists. instances_by_asset_name = collections.defaultdict(list) for instance in filtered_instances: - asset_name = instance.data["asset"] + asset_name = instance.data["folderPath"] instances_by_asset_name[asset_name].append(instance) asset_names = set(instances_by_asset_name.keys()) self.log.debug(( - "Searching for audio subset '{subset}' in assets {assets}" + "Searching for audio product '{product}' in assets {assets}" ).format( - subset=self.audio_subset_name, + product=self.audio_product_name, assets=", ".join([ '"{}"'.format(asset_name) for asset_name in asset_names @@ -102,64 +105,64 @@ class CollectAudio(pyblish.api.ContextPlugin): }] self.log.debug("Audio Data added to instance ...") - def query_representations(self, project_name, asset_names): - """Query representations related to audio subsets for passed assets. + def query_representations(self, project_name, folder_paths): + """Query representations related to audio products for passed assets. Args: project_name (str): Project in which we're looking for all entities. - asset_names (Iterable[str]): Asset names where to look for audio - subsets and their representations. + folder_paths (Iterable[str]): Folder paths where to look for audio + products and their representations. Returns: collections.defaultdict[str, List[Dict[Str, Any]]]: Representations - related to audio subsets by asset name. 
+ related to audio products by asset name. """ output = collections.defaultdict(list) # Query asset documents asset_docs = get_assets( project_name, - asset_names=asset_names, + asset_names=folder_paths, fields=["_id", "name", "data.parents"] ) - asset_id_by_name = { + folder_id_by_path = { get_asset_name_identifier(asset_doc): asset_doc["_id"] for asset_doc in asset_docs } - asset_ids = set(asset_id_by_name.values()) + folder_ids = set(folder_id_by_path.values()) - # Query subsets with name define by 'audio_subset_name' attr - # - one or none subsets with the name should be available on an asset + # Query products with name define by 'audio_product_name' attr + # - one or none products with the name should be available on an asset subset_docs = get_subsets( project_name, - subset_names=[self.audio_subset_name], - asset_ids=asset_ids, + subset_names=[self.audio_product_name], + asset_ids=folder_ids, fields=["_id", "parent"] ) - subset_id_by_asset_id = {} + product_id_by_folder_id = {} for subset_doc in subset_docs: - asset_id = subset_doc["parent"] - subset_id_by_asset_id[asset_id] = subset_doc["_id"] + folder_id = subset_doc["parent"] + product_id_by_folder_id[folder_id] = subset_doc["_id"] - subset_ids = set(subset_id_by_asset_id.values()) - if not subset_ids: + product_ids = set(product_id_by_folder_id.values()) + if not product_ids: return output - # Find all latest versions for the subsets - version_docs_by_subset_id = get_last_versions( - project_name, subset_ids=subset_ids, fields=["_id", "parent"] + # Find all latest versions for the products + version_docs_by_product_id = get_last_versions( + project_name, subset_ids=product_ids, fields=["_id", "parent"] ) - version_id_by_subset_id = { - subset_id: version_doc["_id"] - for subset_id, version_doc in version_docs_by_subset_id.items() + version_id_by_product_id = { + product_id: version_doc["_id"] + for product_id, version_doc in version_docs_by_product_id.items() } - version_ids = set(version_id_by_subset_id.values()) + version_ids = set(version_id_by_product_id.values()) if not version_ids: return output - # Find representations under latest versions of audio subsets + # Find representations under latest versions of audio products repre_docs = get_representations( project_name, version_ids=version_ids ) @@ -171,9 +174,9 @@ class CollectAudio(pyblish.api.ContextPlugin): if not repre_docs_by_version_id: return output - for asset_name in asset_names: - asset_id = asset_id_by_name.get(asset_name) - subset_id = subset_id_by_asset_id.get(asset_id) - version_id = version_id_by_subset_id.get(subset_id) - output[asset_name] = repre_docs_by_version_id[version_id] + for folder_path in folder_paths: + folder_id = folder_id_by_path.get(folder_path) + product_id = product_id_by_folder_id.get(folder_id) + version_id = version_id_by_product_id.get(product_id) + output[folder_path] = repre_docs_by_version_id[version_id] return output diff --git a/client/ayon_core/plugins/publish/collect_comment.py b/client/ayon_core/plugins/publish/collect_comment.py index dadb7b9e8d..980097ac0d 100644 --- a/client/ayon_core/plugins/publish/collect_comment.py +++ b/client/ayon_core/plugins/publish/collect_comment.py @@ -42,8 +42,8 @@ class CollectInstanceCommentDef( pass @classmethod - def apply_settings(cls, project_setting, _): - plugin_settings = project_setting["global"]["publish"].get( + def apply_settings(cls, project_setting): + plugin_settings = project_setting["core"]["publish"].get( "collect_comment_per_instance" ) if not plugin_settings: diff --git 
a/client/ayon_core/plugins/publish/collect_context_entities.py b/client/ayon_core/plugins/publish/collect_context_entities.py index 8480435e21..64ef73e2d9 100644 --- a/client/ayon_core/plugins/publish/collect_context_entities.py +++ b/client/ayon_core/plugins/publish/collect_context_entities.py @@ -1,7 +1,6 @@ """Collect Anatomy and global anatomy data. Requires: - session -> AVALON_ASSET context -> projectName context -> asset context -> task @@ -26,7 +25,7 @@ class CollectContextEntities(pyblish.api.ContextPlugin): def process(self, context): project_name = context.data["projectName"] - asset_name = context.data["asset"] + asset_name = context.data["folderPath"] task_name = context.data["task"] project_entity = get_project(project_name) diff --git a/client/ayon_core/plugins/publish/collect_current_context.py b/client/ayon_core/plugins/publish/collect_current_context.py index 90b9fcdcbd..76d30a913e 100644 --- a/client/ayon_core/plugins/publish/collect_current_context.py +++ b/client/ayon_core/plugins/publish/collect_current_context.py @@ -1,7 +1,7 @@ """ Provides: context -> projectName (str) - context -> asset (str) + context -> folderPath (str) context -> task (str) """ @@ -21,7 +21,7 @@ class CollectCurrentContext(pyblish.api.ContextPlugin): def process(self, context): # Check if values are already set project_name = context.data.get("projectName") - asset_name = context.data.get("asset") + asset_name = context.data.get("folderPath") task_name = context.data.get("task") current_context = get_current_context() @@ -29,13 +29,12 @@ class CollectCurrentContext(pyblish.api.ContextPlugin): context.data["projectName"] = current_context["project_name"] if not asset_name: - context.data["asset"] = current_context["asset_name"] + context.data["folderPath"] = current_context["folder_path"] if not task_name: context.data["task"] = current_context["task_name"] # QUESTION should we be explicit with keys? 
(the same on instances) - # - 'asset' -> 'assetName' # - 'task' -> 'taskName' self.log.info(( @@ -45,6 +44,6 @@ class CollectCurrentContext(pyblish.api.ContextPlugin): "Task: {task_name}" ).format( project_name=context.data["projectName"], - asset_name=context.data["asset"], + asset_name=context.data["folderPath"], task_name=context.data["task"] )) diff --git a/client/ayon_core/plugins/publish/collect_custom_staging_dir.py b/client/ayon_core/plugins/publish/collect_custom_staging_dir.py index 6840c8e416..e42f34b0ae 100644 --- a/client/ayon_core/plugins/publish/collect_custom_staging_dir.py +++ b/client/ayon_core/plugins/publish/collect_custom_staging_dir.py @@ -38,8 +38,8 @@ class CollectCustomStagingDir(pyblish.api.InstancePlugin): template_key = "transient" def process(self, instance): - family = instance.data["family"] - subset_name = instance.data["subset"] + product_type = instance.data["productType"] + product_name = instance.data["productName"] host_name = instance.context.data["hostName"] project_name = instance.context.data["projectName"] project_settings = instance.context.data["project_settings"] @@ -47,9 +47,15 @@ class CollectCustomStagingDir(pyblish.api.InstancePlugin): task = instance.data["anatomyData"].get("task", {}) transient_tml, is_persistent = get_custom_staging_dir_info( - project_name, host_name, family, task.get("name"), - task.get("type"), subset_name, project_settings=project_settings, - anatomy=anatomy, log=self.log) + project_name, + host_name, + product_type, + product_name, + task.get("name"), + task.get("type"), + project_settings=project_settings, + anatomy=anatomy, + log=self.log) if transient_tml: anatomy_data = copy.deepcopy(instance.data["anatomyData"]) @@ -66,5 +72,5 @@ class CollectCustomStagingDir(pyblish.api.InstancePlugin): result_str = "Not adding" self.log.debug("{} custom staging dir for instance with '{}'".format( - result_str, family + result_str, product_type )) diff --git a/client/ayon_core/plugins/publish/collect_frames_fix.py b/client/ayon_core/plugins/publish/collect_frames_fix.py index 4903991d40..0fe86b8d70 100644 --- a/client/ayon_core/plugins/publish/collect_frames_fix.py +++ b/client/ayon_core/plugins/publish/collect_frames_fix.py @@ -17,7 +17,7 @@ class CollectFramesFixDef( ): """Provides text field to insert frame(s) to be rerendered. - Published files of last version of an instance subset are collected into + Published files of last version of an instance product are collected into instance.data["last_version_published_files"]. All these but frames mentioned in text field will be reused for new version. 
""" @@ -40,15 +40,15 @@ class CollectFramesFixDef( instance.data["frames_to_fix"] = frames_to_fix - subset_name = instance.data["subset"] - asset_name = instance.data["asset"] + product_name = instance.data["productName"] + asset_name = instance.data["folderPath"] project_entity = instance.data["projectEntity"] project_name = project_entity["name"] version = get_last_version_by_subset_name( project_name, - subset_name, + product_name, asset_name=asset_name ) if not version: diff --git a/client/ayon_core/plugins/publish/collect_from_create_context.py b/client/ayon_core/plugins/publish/collect_from_create_context.py index d38138b2e9..8218806c4c 100644 --- a/client/ayon_core/plugins/publish/collect_from_create_context.py +++ b/client/ayon_core/plugins/publish/collect_from_create_context.py @@ -5,7 +5,7 @@ import os import pyblish.api from ayon_core.host import IPublishHost -from ayon_core.pipeline import legacy_io, registered_host +from ayon_core.pipeline import registered_host from ayon_core.pipeline.create import CreateContext @@ -38,7 +38,6 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): for created_instance in create_context.instances: instance_data = created_instance.data_to_store() - instance_data["asset"] = instance_data.pop("folderPath") if instance_data["active"]: thumbnail_path = thumbnail_paths_by_instance_id.get( created_instance.id @@ -57,12 +56,14 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): asset_name = create_context.get_current_asset_name() task_name = create_context.get_current_task_name() for key, value in ( - ("AVALON_PROJECT", project_name), - ("AVALON_ASSET", asset_name), - ("AVALON_TASK", task_name) + ("AYON_PROJECT_NAME", project_name), + ("AYON_FOLDER_PATH", asset_name), + ("AYON_TASK_NAME", task_name) ): - legacy_io.Session[key] = value - os.environ[key] = value + if value is None: + os.environ.pop(key, None) + else: + os.environ[key] = value def create_instance( self, @@ -71,18 +72,22 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): transient_data, thumbnail_path ): - subset = in_data["subset"] + product_name = in_data["productName"] # If instance data already contain families then use it instance_families = in_data.get("families") or [] + # Add product type to families + instance_families.append(in_data["productType"]) - instance = context.create_instance(subset) + instance = context.create_instance(product_name) instance.data.update({ - "subset": subset, - "asset": in_data["asset"], + "publish": True, + "label": in_data.get("label") or product_name, + "name": product_name, + "folderPath": in_data["folderPath"], "task": in_data["task"], - "label": in_data.get("label") or subset, - "name": subset, - "family": in_data["family"], + "productName": product_name, + "productType": in_data["productType"], + "family": in_data["productType"], "families": instance_families, "representations": [], "thumbnailSource": thumbnail_path diff --git a/client/ayon_core/plugins/publish/collect_hierarchy.py b/client/ayon_core/plugins/publish/collect_hierarchy.py index 32f10ba4c8..8ba83d582f 100644 --- a/client/ayon_core/plugins/publish/collect_hierarchy.py +++ b/client/ayon_core/plugins/publish/collect_hierarchy.py @@ -28,11 +28,13 @@ class CollectHierarchy(pyblish.api.ContextPlugin): # shot data dict shot_data = {} - family = instance.data["family"] + product_type = instance.data["productType"] families = instance.data["families"] # exclude other families then self.families with intersection - if not 
set(self.families).intersection(set(families + [family])): + if not set(self.families).intersection( + set(families + [product_type]) + ): continue # exclude if not masterLayer True @@ -62,7 +64,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): "pixelAspect": instance.data["pixelAspect"] } # Split by '/' for AYON where asset is a path - name = instance.data["asset"].split("/")[-1] + name = instance.data["folderPath"].split("/")[-1] actual = {name: shot_data} for parent in reversed(instance.data["parents"]): diff --git a/client/ayon_core/plugins/publish/collect_host_name.py b/client/ayon_core/plugins/publish/collect_host_name.py index 89e4e03c1a..e76579bbd2 100644 --- a/client/ayon_core/plugins/publish/collect_host_name.py +++ b/client/ayon_core/plugins/publish/collect_host_name.py @@ -24,13 +24,13 @@ class CollectHostName(pyblish.api.ContextPlugin): if host_name and app_name and app_label: return - # Use AVALON_APP to get host name if available + # Use AYON_HOST_NAME to get host name if available if not host_name: - host_name = os.environ.get("AVALON_APP") + host_name = os.environ.get("AYON_HOST_NAME") - # Use AVALON_APP_NAME to get full app name + # Use AYON_APP_NAME to get full app name if not app_name: - app_name = os.environ.get("AVALON_APP_NAME") + app_name = os.environ.get("AYON_APP_NAME") # Fill missing values based on app full name if (not host_name or not app_label) and app_name: diff --git a/client/ayon_core/plugins/publish/collect_input_representations_to_versions.py b/client/ayon_core/plugins/publish/collect_input_representations_to_versions.py index b5c9872e74..6caee1be6a 100644 --- a/client/ayon_core/plugins/publish/collect_input_representations_to_versions.py +++ b/client/ayon_core/plugins/publish/collect_input_representations_to_versions.py @@ -1,7 +1,5 @@ import pyblish.api -from bson.objectid import ObjectId - from ayon_core.client import get_representations diff --git a/client/ayon_core/plugins/publish/collect_otio_review.py b/client/ayon_core/plugins/publish/collect_otio_review.py index 0e4d596213..69cf9199e7 100644 --- a/client/ayon_core/plugins/publish/collect_otio_review.py +++ b/client/ayon_core/plugins/publish/collect_otio_review.py @@ -91,7 +91,7 @@ class CollectOtioReview(pyblish.api.InstancePlugin): if otio_review_clips: # add review track to instance and change label to reflect it - label = instance.data.get("label", instance.data["subset"]) + label = instance.data.get("label", instance.data["productName"]) instance.data["label"] = label + " (review)" instance.data["families"] += ["review", "ftrack"] instance.data["otioReviewClips"] = otio_review_clips diff --git a/client/ayon_core/plugins/publish/collect_otio_subset_resources.py b/client/ayon_core/plugins/publish/collect_otio_subset_resources.py index e6817a4beb..3f47e6e3bf 100644 --- a/client/ayon_core/plugins/publish/collect_otio_subset_resources.py +++ b/client/ayon_core/plugins/publish/collect_otio_subset_resources.py @@ -16,7 +16,7 @@ from ayon_core.pipeline.publish import ( class CollectOtioSubsetResources(pyblish.api.InstancePlugin): - """Get Resources for a subset version""" + """Get Resources for a product version""" label = "Collect OTIO Subset Resources" order = pyblish.api.CollectorOrder + 0.491 @@ -32,7 +32,7 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): make_sequence_collection ) - if "audio" in instance.data["family"]: + if "audio" in instance.data["productType"]: return if not instance.data.get("representations"): @@ -250,14 +250,14 @@ class 
CollectOtioSubsetResources(pyblish.api.InstancePlugin): # Task can be optional in anatomy data host_name = context.data["hostName"] - family = instance.data["family"] + product_type = instance.data["productType"] anatomy_data = instance.data["anatomyData"] task_info = anatomy_data.get("task") or {} return get_publish_template_name( project_name, host_name, - family, + product_type, task_name=task_info.get("name"), task_type=task_info.get("type"), project_settings=context.data["project_settings"], diff --git a/client/ayon_core/plugins/publish/collect_rendered_files.py b/client/ayon_core/plugins/publish/collect_rendered_files.py index 5ffcd669a0..ca88a7aa82 100644 --- a/client/ayon_core/plugins/publish/collect_rendered_files.py +++ b/client/ayon_core/plugins/publish/collect_rendered_files.py @@ -12,7 +12,7 @@ import json import pyblish.api -from ayon_core.pipeline import legacy_io, KnownPublishError +from ayon_core.pipeline import KnownPublishError from ayon_core.pipeline.publish.lib import add_repre_files_for_cleanup @@ -71,14 +71,19 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): """ # validate basic necessary data data_err = "invalid json file - missing data" - required = ["asset", "user", "comment", - "job", "instances", "session", "version"] + required = ["user", "comment", + "job", "instances", "version"] assert all(elem in data.keys() for elem in required), data_err + if "folderPath" not in data and "asset" not in data: + raise AssertionError(data_err) + + if "folderPath" not in data: + data["folderPath"] = data.pop("asset") # set context by first json file ctx = self._context.data - ctx["asset"] = ctx.get("asset") or data.get("asset") + ctx["folderPath"] = ctx.get("folderPath") or data.get("folderPath") ctx["intent"] = ctx.get("intent") or data.get("intent") ctx["comment"] = ctx.get("comment") or data.get("comment") ctx["user"] = ctx.get("user") or data.get("user") @@ -87,7 +92,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): # basic sanity check to see if we are working in same context # if some other json file has different context, bail out. 
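For reference, a minimal standalone sketch of the legacy-key fallback the collector above now applies to farm-submitted JSON metadata, assuming the same data shape the plugin reads; the helper name is illustrative only and not part of the patch:

    # Hypothetical helper mirroring the fallback above: older OpenPype farm
    # jobs write "asset", newer AYON jobs write "folderPath", and either is
    # accepted and normalized to "folderPath" before further processing.
    def normalize_instance_context(data):
        if "folderPath" not in data and "asset" not in data:
            raise AssertionError("invalid json file - missing data")
        if "folderPath" not in data:
            # Legacy metadata: rename the key in place.
            data["folderPath"] = data.pop("asset")
        return data

    legacy = {"asset": "/shots/sh010", "user": "artist", "version": 1}
    print(normalize_instance_context(legacy)["folderPath"])  # /shots/sh010
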
ctx_err = "inconsistent contexts in json files - %s" - assert ctx.get("asset") == data.get("asset"), ctx_err % "asset" + assert ctx.get("folderPath") == data.get("folderPath"), ctx_err % "folderPath" assert ctx.get("intent") == data.get("intent"), ctx_err % "intent" assert ctx.get("comment") == data.get("comment"), ctx_err % "comment" assert ctx.get("user") == data.get("user"), ctx_err % "user" @@ -98,9 +103,9 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): for instance_data in data.get("instances"): self.log.debug(" - processing instance for {}".format( - instance_data.get("subset"))) + instance_data.get("productName"))) instance = self._context.create_instance( - instance_data.get("subset") + instance_data.get("productName") ) self._fill_staging_dir(instance_data, anatomy) @@ -144,7 +149,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): os.environ.get("AYON_PUBLISH_DATA") or os.environ.get("OPENPYPE_PUBLISH_DATA") ) - if publish_data_paths: + if not publish_data_paths: raise KnownPublishError("Missing `AYON_PUBLISH_DATA`") # QUESTION @@ -165,24 +170,28 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): path = anatomy.fill_root(path) data = self._load_json(path) assert data, "failed to load json file" - if not session_is_set: - session_data = data["session"] - remapped = anatomy.roots_obj.path_remapper( - session_data["AVALON_WORKDIR"] - ) - if remapped: - session_data["AVALON_WORKDIR"] = remapped - - self.log.debug("Setting session using data from file") - legacy_io.Session.update(session_data) - os.environ.update(session_data) + session_data = data.get("session") + if not session_is_set and session_data: session_is_set = True + self.log.debug("Setting session using data from file") + os.environ.update(session_data) + staging_dir_persistent = self._process_path(data, anatomy) if not staging_dir_persistent: context.data["cleanupFullPaths"].append(path) context.data["cleanupEmptyDirs"].append( os.path.dirname(path) ) + + # Remap workdir if it's set + workdir = os.getenv("AYON_WORKDIR") + remapped_workdir = None + if workdir: + remapped_workdir = anatomy.roots_obj.path_remapper( + os.getenv("AYON_WORKDIR") + ) + if remapped_workdir: + os.environ["AYON_WORKDIR"] = remapped_workdir except Exception as e: self.log.error(e, exc_info=True) raise Exception("Error") from e diff --git a/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py b/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py index 397a3ce87c..c1326f164d 100644 --- a/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py +++ b/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py @@ -67,9 +67,9 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): # NOTE: # may have more then one representation that are same version version = { - "subsetName": con["name"], - "representation": repre_doc["_id"], - "version": repre_doc["parent"], + "container_name": con["name"], + "representation_id": repre_doc["_id"], + "version_id": repre_doc["parent"], } loaded_versions.append(version) diff --git a/client/ayon_core/plugins/publish/collect_scene_version.py b/client/ayon_core/plugins/publish/collect_scene_version.py index 254d3c913d..b04900c74e 100644 --- a/client/ayon_core/plugins/publish/collect_scene_version.py +++ b/client/ayon_core/plugins/publish/collect_scene_version.py @@ -1,8 +1,7 @@ import os import pyblish.api -from ayon_core.lib import get_version_from_path -from ayon_core.tests.lib import is_in_tests +from ayon_core.lib import get_version_from_path, 
is_in_tests from ayon_core.pipeline import KnownPublishError diff --git a/client/ayon_core/plugins/publish/collect_settings.py b/client/ayon_core/plugins/publish/collect_settings.py index 4e3331209d..66b89a114c 100644 --- a/client/ayon_core/plugins/publish/collect_settings.py +++ b/client/ayon_core/plugins/publish/collect_settings.py @@ -1,8 +1,5 @@ from pyblish import api -from ayon_core.settings import ( - get_current_project_settings, - get_system_settings, -) +from ayon_core.settings import get_current_project_settings class CollectSettings(api.ContextPlugin): @@ -13,4 +10,3 @@ class CollectSettings(api.ContextPlugin): def process(self, context): context.data["project_settings"] = get_current_project_settings() - context.data["system_settings"] = get_system_settings() diff --git a/client/ayon_core/plugins/publish/extract_burnin.py b/client/ayon_core/plugins/publish/extract_burnin.py index 2b76527d5f..ab6353a29f 100644 --- a/client/ayon_core/plugins/publish/extract_burnin.py +++ b/client/ayon_core/plugins/publish/extract_burnin.py @@ -65,8 +65,8 @@ class ExtractBurnin(publish.Extractor): # Default options for burnins for cases that are not set in presets. default_options = { "font_size": 42, - "font_color": [255, 255, 255, 255], - "bg_color": [0, 0, 0, 127], + "font_color": [255, 255, 255, 1.0], + "bg_color": [0, 0, 0, 0.5], "bg_padding": 5, "x_offset": 5, "y_offset": 5 @@ -96,7 +96,20 @@ class ExtractBurnin(publish.Extractor): instance.data["representations"].remove(repre) def _get_burnins_per_representations(self, instance, src_burnin_defs): - self.log.debug("Filtering of representations and their burnins starts") + """ + + Args: + instance (pyblish.api.Instance): Pyblish instance. + src_burnin_defs (list): Burnin definitions. + + Returns: + list[tuple[dict, list]]: List of tuples containing representation + and its burnin definitions. + + """ + self.log.debug( + "Filtering of representations and their burnins starts" + ) filtered_repres = [] repres = instance.data.get("representations") or [] @@ -111,16 +124,13 @@ class ExtractBurnin(publish.Extractor): ) burnin_defs = copy.deepcopy(src_burnin_defs) - self.log.debug( - "burnin_defs.keys(): {}".format(burnin_defs.keys()) - ) # Filter output definition by `burnin` represetation key - repre_linked_burnins = { - name: output - for name, output in burnin_defs.items() - if name in repre_burnin_links - } + repre_linked_burnins = [ + burnin_def + for burnin_def in burnin_defs + if burnin_def["name"] in repre_burnin_links + ] self.log.debug( "repre_linked_burnins: {}".format(repre_linked_burnins) ) @@ -146,28 +156,32 @@ class ExtractBurnin(publish.Extractor): def main_process(self, instance): host_name = instance.context.data["hostName"] - family = instance.data["family"] + product_type = instance.data["productType"] + product_name = instance.data["productName"] task_data = instance.data["anatomyData"].get("task", {}) task_name = task_data.get("name") task_type = task_data.get("type") - subset = instance.data["subset"] filtering_criteria = { "hosts": host_name, - "families": family, + "product_types": product_type, + "product_names": product_name, "task_names": task_name, "task_types": task_type, - "subset": subset } - profile = filter_profiles(self.profiles, filtering_criteria, - logger=self.log) - + profile = filter_profiles( + self.profiles, + filtering_criteria, + logger=self.log + ) if not profile: self.log.debug(( "Skipped instance. 
None of profiles in presets are for" - " Host: \"{}\" | Families: \"{}\" | Task \"{}\"" - " | Task type \"{}\" | Subset \"{}\" " - ).format(host_name, family, task_name, task_type, subset)) + " Host: \"{}\" | Product type: \"{}\" | Product name \"{}\"" + " | Task name \"{}\" | Task type \"{}\"" + ).format( + host_name, product_type, product_name, task_name, task_type + )) return # Pre-filter burnin definitions by instance families @@ -175,9 +189,9 @@ class ExtractBurnin(publish.Extractor): if not burnin_defs: self.log.debug(( "Skipped instance. Burnin definitions are not set for profile" - " Host: \"{}\" | Families: \"{}\" | Task \"{}\"" + " Host: \"{}\" | Product type: \"{}\" | Task name \"{}\"" " | Profile \"{}\"" - ).format(host_name, family, task_name, profile)) + ).format(host_name, product_type, task_name, profile)) return burnin_options = self._get_burnin_options() @@ -275,7 +289,8 @@ class ExtractBurnin(publish.Extractor): # it in review? # burnin_data["fps"] = fps - for filename_suffix, burnin_def in repre_burnin_defs.items(): + for burnin_def in repre_burnin_defs: + filename_suffix = burnin_def["name"] new_repre = copy.deepcopy(repre) new_repre["stagingDir"] = src_repre_staging_dir @@ -288,16 +303,28 @@ class ExtractBurnin(publish.Extractor): burnin_values = {} for key in self.positions: value = burnin_def.get(key) - if value: - burnin_values[key] = value.replace( - "{task}", "{task[name]}" - ) + if not value: + continue + # TODO remove replacements + burnin_values[key] = ( + value + .replace("{task}", "{task[name]}") + .replace("{product[name]}", "{subset}") + .replace("{Product[name]}", "{Subset}") + .replace("{PRODUCT[NAME]}", "{SUBSET}") + .replace("{product[type]}", "{family}") + .replace("{Product[type]}", "{Family}") + .replace("{PRODUCT[TYPE]}", "{FAMILY}") + .replace("{folder[name]}", "{asset}") + .replace("{Folder[name]}", "{Asset}") + .replace("{FOLDER[NAME]}", "{ASSET}") + ) # Remove "delete" tag from new representation if "delete" in new_repre["tags"]: new_repre["tags"].remove("delete") - if len(repre_burnin_defs.keys()) > 1: + if len(repre_burnin_defs) > 1: # Update name and outputName to be # able have multiple outputs in case of more burnin presets # Join previous "outputName" with filename suffix @@ -401,8 +428,7 @@ class ExtractBurnin(publish.Extractor): bg_color_hex = "#{0:0>2X}{1:0>2X}{2:0>2X}".format( bg_red, bg_green, bg_blue ) - bg_color_alpha = float(bg_alpha) / 255 - burnin_options["bg_opacity"] = bg_color_alpha + burnin_options["bg_opacity"] = bg_alpha burnin_options["bg_color"] = bg_color_hex # FG Color @@ -412,8 +438,7 @@ class ExtractBurnin(publish.Extractor): fg_color_hex = "#{0:0>2X}{1:0>2X}{2:0>2X}".format( fg_red, fg_green, fg_blue ) - fg_color_alpha = float(fg_alpha) / 255 - burnin_options["opacity"] = fg_color_alpha + burnin_options["opacity"] = fg_alpha burnin_options["font_color"] = fg_color_hex # Define font filepath @@ -543,15 +568,16 @@ class ExtractBurnin(publish.Extractor): Burnin definitions without tags filter are marked as valid. Args: - outputs (list): Contain list of burnin definitions from presets. + burnin_defs (list): Burnin definitions. tags (list): Tags of processed representation. Returns: list: Containg all burnin definitions matching entered tags. 
+ """ - filtered_burnins = {} + filtered_burnins = [] repre_tags_low = set(tag.lower() for tag in tags) - for filename_suffix, burnin_def in burnin_defs.items(): + for burnin_def in burnin_defs: valid = True tag_filters = burnin_def["filter"]["tags"] if tag_filters: @@ -561,8 +587,7 @@ class ExtractBurnin(publish.Extractor): valid = bool(repre_tags_low & tag_filters_low) if valid: - filtered_burnins[filename_suffix] = burnin_def - + filtered_burnins.append(burnin_def) return filtered_burnins def input_output_paths( @@ -724,7 +749,7 @@ class ExtractBurnin(publish.Extractor): Returns: list: Containg all valid output definitions. """ - filtered_burnin_defs = {} + filtered_burnin_defs = [] burnin_defs = profile.get("burnins") if not burnin_defs: @@ -732,13 +757,11 @@ class ExtractBurnin(publish.Extractor): families = self.families_from_instance(instance) - for filename_suffix, orig_burnin_def in burnin_defs.items(): + for orig_burnin_def in burnin_defs: burnin_def = copy.deepcopy(orig_burnin_def) - def_filter = burnin_def.get("filter", None) or {} - for key in ("families", "tags"): - if key not in def_filter: - def_filter[key] = [] + filename_suffix = burnin_def["name"] + def_filter = burnin_def["filter"] families_filters = def_filter["families"] if not self.families_filter_validation( families, families_filters @@ -752,10 +775,13 @@ class ExtractBurnin(publish.Extractor): continue # Burnin values + new_burnin_def = {} burnin_values = {} for key, value in tuple(burnin_def.items()): key_low = key.lower() - if key_low in self.positions and value: + if key_low not in self.positions: + new_burnin_def[key] = value + elif value: burnin_values[key_low] = value # Skip processing if burnin values are not set @@ -767,9 +793,9 @@ class ExtractBurnin(publish.Extractor): ).format(filename_suffix, str(orig_burnin_def))) continue - burnin_values["filter"] = def_filter + new_burnin_def.update(burnin_values) - filtered_burnin_defs[filename_suffix] = burnin_values + filtered_burnin_defs.append(new_burnin_def) self.log.debug(( "Burnin definition \"{}\" passed first filtering." diff --git a/client/ayon_core/plugins/publish/extract_color_transcode.py b/client/ayon_core/plugins/publish/extract_color_transcode.py index 66ba8ad2be..b5ddebe05b 100644 --- a/client/ayon_core/plugins/publish/extract_color_transcode.py +++ b/client/ayon_core/plugins/publish/extract_color_transcode.py @@ -26,11 +26,11 @@ class ExtractOIIOTranscode(publish.Extractor): This dict contains source colorspace information, collected by hosts. 
Target colorspace is selected by profiles in the Settings, based on: - - families - - host + - host names + - product types + - product names - task types - task names - - subset names Can produce one or more representations (with different extensions) based on output definition in format: @@ -81,6 +81,7 @@ class ExtractOIIOTranscode(publish.Extractor): if not profile: return + profile_output_defs = profile["outputs"] new_representations = [] repres = instance.data["representations"] for idx, repre in enumerate(list(repres)): @@ -98,7 +99,8 @@ class ExtractOIIOTranscode(publish.Extractor): self.log.warning("Config file doesn't exist, skipping") continue - for output_name, output_def in profile.get("outputs", {}).items(): + for output_def in profile_output_defs: + output_name = output_def["name"] new_repre = copy.deepcopy(repre) original_staging_dir = new_repre["stagingDir"] @@ -311,17 +313,17 @@ class ExtractOIIOTranscode(publish.Extractor): def _get_profile(self, instance): """Returns profile if and how repre should be color transcoded.""" host_name = instance.context.data["hostName"] - family = instance.data["family"] + product_type = instance.data["productType"] + product_name = instance.data["productName"] task_data = instance.data["anatomyData"].get("task", {}) task_name = task_data.get("name") task_type = task_data.get("type") - subset = instance.data["subset"] filtering_criteria = { "hosts": host_name, - "families": family, + "product_types": product_type, + "product_names": product_name, "task_names": task_name, "task_types": task_type, - "subsets": subset } profile = filter_profiles(self.profiles, filtering_criteria, logger=self.log) @@ -329,9 +331,11 @@ class ExtractOIIOTranscode(publish.Extractor): if not profile: self.log.debug(( "Skipped instance. 
None of profiles in presets are for" - " Host: \"{}\" | Families: \"{}\" | Task \"{}\"" - " | Task type \"{}\" | Subset \"{}\" " - ).format(host_name, family, task_name, task_type, subset)) + " Host: \"{}\" | Product types: \"{}\" | Product names: \"{}\"" + " | Task name \"{}\" | Task type \"{}\"" + ).format( + host_name, product_type, product_name, task_name, task_type + )) return profile diff --git a/client/ayon_core/plugins/publish/extract_hierarchy_to_ayon.py b/client/ayon_core/plugins/publish/extract_hierarchy_to_ayon.py index 0851b28134..7ceaf7d2ad 100644 --- a/client/ayon_core/plugins/publish/extract_hierarchy_to_ayon.py +++ b/client/ayon_core/plugins/publish/extract_hierarchy_to_ayon.py @@ -45,7 +45,7 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): continue # Skip if instance asset does not match - instance_asset_name = instance.data.get("asset") + instance_asset_name = instance.data.get("folderPath") instances_by_asset_name[instance_asset_name].append(instance) project_doc = context.data["projectEntity"] @@ -189,7 +189,7 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): active_folder_paths = set() for instance in context: if instance.data.get("publish") is not False: - active_folder_paths.add(instance.data.get("asset")) + active_folder_paths.add(instance.data.get("folderPath")) active_folder_paths.discard(None) diff --git a/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py b/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py index c6bdb59f59..a19b5b9090 100644 --- a/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py +++ b/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py @@ -68,7 +68,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): def add_audio_to_instances(self, audio_file, instances): created_files = [] for inst in instances: - name = inst.data["asset"] + name = inst.data["folderPath"] recycling_file = [f for f in created_files if name in f] @@ -103,7 +103,9 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): else: audio_fpath = recycling_file.pop() - if "audio" in (inst.data["families"] + [inst.data["family"]]): + if "audio" in ( + inst.data["families"] + [inst.data["productType"]] + ): # create empty representation attr if "representations" not in inst.data: inst.data["representations"] = [] @@ -140,10 +142,10 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): """ return [ _i for _i in context - # filter only those with audio family + # filter only those with audio product type or family # and also with reviewAudio data key if bool("audio" in ( - _i.data.get("families", []) + [_i.data["family"]]) + _i.data.get("families", []) + [_i.data["productType"]]) ) or _i.data.get("reviewAudio") ] diff --git a/client/ayon_core/plugins/publish/extract_review.py b/client/ayon_core/plugins/publish/extract_review.py index 91d39882ae..905158c851 100644 --- a/client/ayon_core/plugins/publish/extract_review.py +++ b/client/ayon_core/plugins/publish/extract_review.py @@ -74,7 +74,7 @@ class ExtractReview(pyblish.api.InstancePlugin): alpha_exts = ["exr", "png", "dpx"] # Preset attributes - profiles = None + profiles = [] def process(self, instance): self.log.debug(str(instance.data["representations"])) @@ -103,38 +103,38 @@ class ExtractReview(pyblish.api.InstancePlugin): def _get_outputs_for_instance(self, instance): host_name = instance.context.data["hostName"] - family = self.main_family_from_instance(instance) + product_type = instance.data["productType"] self.log.debug("Host: 
\"{}\"".format(host_name)) - self.log.debug("Family: \"{}\"".format(family)) + self.log.debug("Product type: \"{}\"".format(product_type)) profile = filter_profiles( self.profiles, { "hosts": host_name, - "families": family, + "product_types": product_type, }, logger=self.log) if not profile: self.log.info(( "Skipped instance. None of profiles in presets are for" - " Host: \"{}\" | Family: \"{}\"" - ).format(host_name, family)) + " Host: \"{}\" | Product type: \"{}\"" + ).format(host_name, product_type)) return self.log.debug("Matching profile: \"{}\"".format(json.dumps(profile))) - subset_name = instance.data.get("subset") + product_name = instance.data.get("productName") instance_families = self.families_from_instance(instance) filtered_outputs = self.filter_output_defs( - profile, subset_name, instance_families + profile, product_name, instance_families ) if not filtered_outputs: self.log.info(( "Skipped instance. All output definitions from selected" " profile do not match instance families \"{}\" or" - " subset name \"{}\"." - ).format(str(instance_families), subset_name)) + " product name \"{}\"." + ).format(str(instance_families), product_name)) # Store `filename_suffix` to save arguments profile_outputs = [] @@ -719,12 +719,12 @@ class ExtractReview(pyblish.api.InstancePlugin): lut_filters = self.lut_filters(new_repre, instance, ffmpeg_input_args) ffmpeg_video_filters.extend(lut_filters) - bg_alpha = 0 + bg_alpha = 0.0 bg_color = output_def.get("bg_color") if bg_color: bg_red, bg_green, bg_blue, bg_alpha = bg_color - if bg_alpha > 0: + if bg_alpha > 0.0: if not temp_data["input_allow_bg"]: self.log.info(( "Output definition has defined BG color input was" @@ -734,8 +734,7 @@ class ExtractReview(pyblish.api.InstancePlugin): bg_color_hex = "#{0:0>2X}{1:0>2X}{2:0>2X}".format( bg_red, bg_green, bg_blue ) - bg_color_alpha = float(bg_alpha) / 255 - bg_color_str = "{}@{}".format(bg_color_hex, bg_color_alpha) + bg_color_str = "{}@{}".format(bg_color_hex, bg_alpha) self.log.info("Applying BG color {}".format(bg_color_str)) color_args = [ @@ -1079,7 +1078,7 @@ class ExtractReview(pyblish.api.InstancePlugin): fill_color_hex = "{0:0>2X}{1:0>2X}{2:0>2X}".format( f_red, f_green, f_blue ) - fill_color_alpha = float(f_alpha) / 255 + fill_color_alpha = f_alpha line_thickness = letter_box_def["line_thickness"] line_color = letter_box_def["line_color"] @@ -1087,7 +1086,7 @@ class ExtractReview(pyblish.api.InstancePlugin): line_color_hex = "{0:0>2X}{1:0>2X}{2:0>2X}".format( l_red, l_green, l_blue ) - line_color_alpha = float(l_alpha) / 255 + line_color_alpha = l_alpha # test ratios and define if pillar or letter boxes output_ratio = float(output_width) / float(output_height) @@ -1281,10 +1280,11 @@ class ExtractReview(pyblish.api.InstancePlugin): "FFprobe couldn't read resolution from input file: \"{}\"" ).format(full_input_path_single_file)) - # NOTE Setting only one of `width` or `heigth` is not allowed + # NOTE Setting only one of `width` or `height` is not allowed # - settings value can't have None but has value of 0 - output_width = output_def.get("width") or output_width or None - output_height = output_def.get("height") or output_height or None + output_width = output_def["width"] or output_width or None + output_height = output_def["height"] or output_height or None + # Force to use input resolution if output resolution was not defined # in settings. Resolution from instance is not used when # 'use_input_res' is set to 'True'. 
@@ -1294,7 +1294,12 @@ class ExtractReview(pyblish.api.InstancePlugin): overscan_color_value = "black" overscan_color = output_def.get("overscan_color") if overscan_color: - bg_red, bg_green, bg_blue, _ = overscan_color + if len(overscan_color) == 3: + bg_red, bg_green, bg_blue = overscan_color + else: + # Backwards compatibility + bg_red, bg_green, bg_blue, _ = overscan_color + overscan_color_value = "#{0:0>2X}{1:0>2X}{2:0>2X}".format( bg_red, bg_green, bg_blue ) @@ -1458,13 +1463,6 @@ class ExtractReview(pyblish.api.InstancePlugin): return filters - def main_family_from_instance(self, instance): - """Returns main family of entered instance.""" - family = instance.data.get("family") - if not family: - family = instance.data["families"][0] - return family - def families_from_instance(self, instance): """Returns all families of entered instance.""" families = [] @@ -1492,7 +1490,7 @@ class ExtractReview(pyblish.api.InstancePlugin): return any(family.lower() in families_filter_lower for family in families) - def filter_output_defs(self, profile, subset_name, families): + def filter_output_defs(self, profile, product_name, families): """Return outputs matching input instance families. Output definitions without families filter are marked as valid. @@ -1500,17 +1498,19 @@ class ExtractReview(pyblish.api.InstancePlugin): Args: profile (dict): Profile from presets matching current context. families (list): All families of current instance. - subset_name (str): name of subset + product_name (str): Product name. Returns: - list: Containg all output definitions matching entered families. + dict[str, Any]: Containing all output definitions matching entered + families. """ - outputs = profile.get("outputs") or {} - if not outputs: - return outputs - filtered_outputs = {} - for filename_suffix, output_def in outputs.items(): + outputs = profile.get("outputs") + if not outputs: + return filtered_outputs + + for output_def in outputs: + filename_suffix = output_def["name"] output_filters = output_def.get("filter") # If no filter on output preset, skip filtering and add output # profile for farther processing @@ -1523,17 +1523,17 @@ class ExtractReview(pyblish.api.InstancePlugin): continue # Subsets name filters - subset_filters = [ - subset_filter - for subset_filter in output_filters.get("subsets", []) + product_name_filters = [ + name_filter + for name_filter in output_filters.get("product_names", []) # Skip empty strings - if subset_filter + if name_filter ] - if subset_name and subset_filters: + if product_name and product_name_filters: match = False - for subset_filter in subset_filters: - compiled = re.compile(subset_filter) - if compiled.search(subset_name): + for product_name_filter in product_name_filters: + compiled = re.compile(product_name_filter) + if compiled.search(product_name): match = True break diff --git a/client/ayon_core/plugins/publish/extract_thumbnail.py b/client/ayon_core/plugins/publish/extract_thumbnail.py index c325ed8363..d1b6e4e0cc 100644 --- a/client/ayon_core/plugins/publish/extract_thumbnail.py +++ b/client/ayon_core/plugins/publish/extract_thumbnail.py @@ -2,6 +2,7 @@ import copy import os import subprocess import tempfile +import re import pyblish.api from ayon_core.lib import ( @@ -35,20 +36,34 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): "traypublisher", "substancepainter", "nuke", + "aftereffects" ] enabled = False integrate_thumbnail = False target_size = { - "type": "resize", - "width": 1920, - "height": 1080 + "type": "source", + "resize": { + "width": 
1920, + "height": 1080 + } } - background_color = None + background_color = (0, 0, 0, 0.0) duration_split = 0.5 # attribute presets from settings - oiiotool_defaults = None - ffmpeg_args = None + oiiotool_defaults = { + "type": "colorspace", + "colorspace": "color_picking", + "display_and_view": { + "display": "default", + "view": "sRGB" + } + } + ffmpeg_args = { + "input": [], + "output": [] + } + product_names = [] def process(self, instance): # run main process @@ -71,16 +86,16 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): instance.data["representations"].remove(repre) def _main_process(self, instance): - subset_name = instance.data["subset"] + product_name = instance.data["productName"] instance_repres = instance.data.get("representations") if not instance_repres: self.log.debug(( "Instance {} does not have representations. Skipping" - ).format(subset_name)) + ).format(product_name)) return self.log.debug( - "Processing instance with subset name {}".format(subset_name) + "Processing instance with product name {}".format(product_name) ) # Skip if instance have 'review' key in data set to 'False' @@ -95,14 +110,32 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): # skip crypto passes. # TODO: This is just a quick fix and has its own side-effects - it is - # affecting every subset name with `crypto` in its name. + # affecting every prouct name with `crypto` in its name. # This must be solved properly, maybe using tags on # representation that can be determined much earlier and # with better precision. - if "crypto" in subset_name.lower(): + if "crypto" in product_name.lower(): self.log.debug("Skipping crypto passes.") return + # We only want to process the produces needed from settings. + def validate_string_against_patterns(input_str, patterns): + for pattern in patterns: + if re.match(pattern, input_str): + return True + return False + + product_names = self.product_names + if product_names: + result = validate_string_against_patterns( + product_name, product_names + ) + if not result: + self.log.debug(( + "Product name \"{}\" did not match settings filters: {}" + ).format(product_name, product_names)) + return + # first check for any explicitly marked representations for thumbnail explicit_repres = self._get_explicit_repres_for_thumbnail(instance) if explicit_repres: @@ -346,7 +379,6 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): repre_display = colorspace_data.get("display") repre_view = colorspace_data.get("view") - oiio_default_type = None oiio_default_display = None oiio_default_view = None oiio_default_colorspace = None @@ -364,11 +396,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): # oiiotool_defaults elif self.oiiotool_defaults: oiio_default_type = self.oiiotool_defaults["type"] - if "colorspace" in oiio_default_type: + if "colorspace" == oiio_default_type: oiio_default_colorspace = self.oiiotool_defaults["colorspace"] else: - oiio_default_display = self.oiiotool_defaults["display"] - oiio_default_view = self.oiiotool_defaults["view"] + display_and_view = self.oiiotool_defaults["display_and_view"] + oiio_default_display = display_and_view["display"] + oiio_default_view = display_and_view["view"] try: convert_colorspace( @@ -492,11 +525,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): input_path, ): # get settings - if self.target_size.get("type") == "source": + if self.target_size["type"] == "source": return [] - target_width = self.target_size["width"] - target_height = self.target_size["height"] + resize = self.target_size["resize"] + 
target_width = resize["width"] + target_height = resize["height"] # form arg string per application return get_rescaled_command_arguments( diff --git a/client/ayon_core/plugins/publish/extract_thumbnail_from_source.py b/client/ayon_core/plugins/publish/extract_thumbnail_from_source.py index 8d043d700d..7751d73335 100644 --- a/client/ayon_core/plugins/publish/extract_thumbnail_from_source.py +++ b/client/ayon_core/plugins/publish/extract_thumbnail_from_source.py @@ -38,9 +38,9 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): def process(self, instance): self._create_context_thumbnail(instance.context) - subset_name = instance.data["subset"] + product_name = instance.data["productName"] self.log.debug( - "Processing instance with subset name {}".format(subset_name) + "Processing instance with product name {}".format(product_name) ) thumbnail_source = instance.data.get("thumbnailSource") if not thumbnail_source: diff --git a/client/ayon_core/plugins/publish/help/validate_unique_subsets.xml b/client/ayon_core/plugins/publish/help/validate_unique_subsets.xml index b18f046f84..a4b289d848 100644 --- a/client/ayon_core/plugins/publish/help/validate_unique_subsets.xml +++ b/client/ayon_core/plugins/publish/help/validate_unique_subsets.xml @@ -3,11 +3,11 @@ Subset not unique -## Clashing subset names found +## Clashing product names found -Multiples instances from your scene are set to publish into the same asset > subset. +Multiples instances from your scene are set to publish into the same folder > product. - Non unique subset names: '{non_unique}' + Non unique product names: '{non_unique}' ### How to repair? diff --git a/client/ayon_core/plugins/publish/integrate.py b/client/ayon_core/plugins/publish/integrate.py index a67c837daf..12c702c93b 100644 --- a/client/ayon_core/plugins/publish/integrate.py +++ b/client/ayon_core/plugins/publish/integrate.py @@ -6,7 +6,6 @@ import datetime import clique import six -from bson.objectid import ObjectId import pyblish.api from ayon_core.client.operations import ( @@ -61,7 +60,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): """Register publish in the database and transfer files to destinations. Steps: - 1) Register the subset and version + 1) Register the product and version 2) Transfer the representation files to the destination 3) Register the representation @@ -149,8 +148,19 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Representation context keys that should always be written to # the database even if not used by the destination template db_representation_context_keys = [ - "project", "asset", "task", "subset", "version", "representation", - "family", "hierarchy", "username", "user", "output" + "project", + "asset", + "hierarchy", + "folder", + "task", + "product", + "subset", + "family", + "version", + "representation", + "username", + "user", + "output" ] def process(self, instance): @@ -173,7 +183,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.warning(( "Skipping, there are no representations" " to integrate for instance {}" - ).format(instance.data["family"])) + ).format(instance.data["productType"])) return file_transactions = FileTransaction(log=self.log, @@ -206,7 +216,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): if not repres: raise KnownPublishError( "Instance {} has no representations to integrate".format( - instance.data["family"] + instance.data["productType"] ) ) @@ -308,9 +318,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # increase if the file transaction takes a long time. 
op_session.commit() - self.log.info("Subset '{subset[name]}' version {version[name]} " - "written to database..".format(subset=subset, - version=version)) + self.log.info(( + "Product '{}' version {} written to database.." + ).format(subset["name"], version["name"])) # Process all file transfers of all integrations now self.log.debug("Integrating source files to destination ...") @@ -404,13 +414,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin): def prepare_subset(self, instance, op_session, project_name): asset_doc = instance.data["assetEntity"] - subset_name = instance.data["subset"] - family = instance.data["family"] - self.log.debug("Subset: {}".format(subset_name)) + product_name = instance.data["productName"] + product_type = instance.data["productType"] + self.log.debug("Product: {}".format(product_name)) # Get existing subset if it exists existing_subset_doc = get_subset_by_name( - project_name, subset_name, asset_doc["_id"] + project_name, product_name, asset_doc["_id"] ) # Define subset data @@ -431,12 +441,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin): if existing_subset_doc: subset_id = existing_subset_doc["_id"] subset_doc = new_subset_document( - subset_name, family, asset_doc["_id"], data, subset_id + product_name, product_type, asset_doc["_id"], data, subset_id ) if existing_subset_doc is None: # Create a new subset - self.log.info("Subset '%s' not found, creating ..." % subset_name) + self.log.info( + "Product '%s' not found, creating ..." % product_name + ) op_session.create_entity( project_name, subset_doc["type"], subset_doc ) @@ -456,7 +468,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): update_data ) - self.log.debug("Prepared subset: {}".format(subset_name)) + self.log.debug("Prepared product: {}".format(product_name)) return subset_doc def prepare_version(self, instance, op_session, subset_doc, project_name): @@ -915,13 +927,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Task can be optional in anatomy data host_name = context.data["hostName"] anatomy_data = instance.data["anatomyData"] - family = anatomy_data["family"] + product_type = instance.data["productType"] task_info = anatomy_data.get("task") or {} return get_publish_template_name( project_name, host_name, - family, + product_type, task_name=task_info.get("name"), task_type=task_info.get("type"), project_settings=context.data["project_settings"], @@ -988,7 +1000,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): """ return { - "_id": ObjectId(), "path": self.get_rootless_path(anatomy, path), "size": os.path.getsize(path), "hash": source_hash(path), diff --git a/client/ayon_core/plugins/publish/integrate_hero_version.py b/client/ayon_core/plugins/publish/integrate_hero_version.py index 6dec41b7b0..c275f75118 100644 --- a/client/ayon_core/plugins/publish/integrate_hero_version.py +++ b/client/ayon_core/plugins/publish/integrate_hero_version.py @@ -46,8 +46,18 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): # Can specify representation names that will be ignored (lower case) ignored_representation_names = [] db_representation_context_keys = [ - "project", "asset", "task", "subset", "representation", - "family", "hierarchy", "task", "username", "user" + "project", + "folder", + "asset", + "hierarchy", + "task", + "product", + "subset", + "family", + "representation", + "username", + "user", + "output" ] # QUESTION/TODO this process should happen on server if crashed due to # permissions error on files (files were used or user didn't have perms) @@ -57,8 +67,8 @@ 
class IntegrateHeroVersion(pyblish.api.InstancePlugin): def process(self, instance): self.log.debug( - "--- Integration of Hero version for subset `{}` begins.".format( - instance.data.get("subset", str(instance)) + "--- Integration of Hero version for product `{}` begins.".format( + instance.data["productName"] ) ) published_repres = instance.data.get("published_representations") @@ -503,10 +513,10 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): raise self.log.debug(( - "--- hero version integration for subset `{}`" + "--- hero version integration for product `{}`" " seems to be successful." ).format( - instance.data.get("subset", str(instance)) + instance.data["productName"] )) def get_all_files_from_path(self, path): @@ -558,14 +568,12 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): anatomy_data = instance.data["anatomyData"] task_info = anatomy_data.get("task") or {} host_name = instance.context.data["hostName"] - - # TODO raise error if Hero not set? - family = self.main_family_from_instance(instance) + product_type = instance.data["productType"] return get_publish_template_name( project_name, host_name, - family, + product_type, task_info.get("name"), task_info.get("type"), project_settings=instance.context.data["project_settings"], @@ -573,13 +581,6 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): logger=self.log ) - def main_family_from_instance(self, instance): - """Returns main family of entered instance.""" - family = instance.data.get("family") - if not family: - family = instance.data["families"][0] - return family - def copy_file(self, src_path, dst_path): # TODO check drives if are the same to check if cas hardlink dirname = os.path.dirname(dst_path) diff --git a/client/ayon_core/plugins/publish/integrate_inputlinks.py b/client/ayon_core/plugins/publish/integrate_inputlinks.py index da8df53170..f7e802f410 100644 --- a/client/ayon_core/plugins/publish/integrate_inputlinks.py +++ b/client/ayon_core/plugins/publish/integrate_inputlinks.py @@ -61,8 +61,8 @@ class IntegrateInputLinksAYON(pyblish.api.ContextPlugin): "Instance {} doesn't have version.".format(instance)) continue - family = instance.data.get("family") - if family == "workfile": + product_type = instance.data["productType"] + if product_type == "workfile": workfile_instance = instance else: other_instances.append(instance) @@ -107,7 +107,7 @@ class IntegrateInputLinksAYON(pyblish.api.ContextPlugin): self.add_link( new_links_by_type, "reference", - version["version"], + version["version_id"], workfile_version_id, ) diff --git a/client/ayon_core/plugins/publish/integrate_subset_group.py b/client/ayon_core/plugins/publish/integrate_product_group.py similarity index 74% rename from client/ayon_core/plugins/publish/integrate_subset_group.py rename to client/ayon_core/plugins/publish/integrate_product_group.py index c2f1eac9e3..f69e7744d9 100644 --- a/client/ayon_core/plugins/publish/integrate_subset_group.py +++ b/client/ayon_core/plugins/publish/integrate_product_group.py @@ -17,37 +17,37 @@ from ayon_core.lib import ( ) -class IntegrateSubsetGroup(pyblish.api.InstancePlugin): +class IntegrateProductGroup(pyblish.api.InstancePlugin): """Integrate Subset Group for publish.""" # Run after CollectAnatomyInstanceData order = pyblish.api.IntegratorOrder - 0.1 - label = "Subset Group" + label = "Product Group" # Attributes set by settings - subset_grouping_profiles = None + product_grouping_profiles = None def process(self, instance): - """Look into subset group profiles set by settings. 
+ """Look into product group profiles set by settings. - Attribute 'subset_grouping_profiles' is defined by settings. + Attribute 'product_grouping_profiles' is defined by settings. """ - # Skip if 'subset_grouping_profiles' is empty - if not self.subset_grouping_profiles: + # Skip if 'product_grouping_profiles' is empty + if not self.product_grouping_profiles: return if instance.data.get("subsetGroup"): # If subsetGroup is already set then allow that value to remain self.log.debug(( - "Skipping collect subset group due to existing value: {}" + "Skipping collect product group due to existing value: {}" ).format(instance.data["subsetGroup"])) return # Skip if there is no matching profile filter_criteria = self.get_profile_filter_criteria(instance) profile = filter_profiles( - self.subset_grouping_profiles, + self.product_grouping_profiles, filter_criteria, logger=self.log ) @@ -56,12 +56,18 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin): return template = profile["template"] + product_name = instance.data["productName"] + product_type = instance.data["productType"] fill_pairs = prepare_template_data({ - "family": filter_criteria["families"], + "family": product_type, "task": filter_criteria["tasks"], "host": filter_criteria["hosts"], - "subset": instance.data["subset"], + "subset": product_name, + "product": { + "name": product_name, + "type": product_type, + }, "renderlayer": instance.data.get("renderlayer") }) @@ -91,7 +97,7 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin): # Return filter criteria return { - "families": anatomy_data["family"], + "product_types": instance.data["productType"], "tasks": task.get("name"), "hosts": instance.context.data["hostName"], "task_types": task.get("type") diff --git a/client/ayon_core/plugins/publish/integrate_thumbnail.py b/client/ayon_core/plugins/publish/integrate_thumbnail.py index dd3fdd5073..9eb649d5a0 100644 --- a/client/ayon_core/plugins/publish/integrate_thumbnail.py +++ b/client/ayon_core/plugins/publish/integrate_thumbnail.py @@ -42,10 +42,6 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin): label = "Integrate Thumbnails to AYON" order = pyblish.api.IntegratorOrder + 0.01 - required_context_keys = [ - "project", "asset", "task", "subset", "version" - ] - def process(self, context): # Filter instances which can be used for integration filtered_instance_items = self._prepare_instances(context) @@ -196,12 +192,13 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin): )) asset_entity = instance.data["assetEntity"] + folder_path = instance.data["folderPath"] thumbnail_info_by_entity_id[asset_entity["_id"]] = { "thumbnail_id": thumbnail_id, "entity_type": "asset", } - self.log.debug("Setting thumbnail for asset \"{}\" <{}>".format( - asset_entity["name"], version_id + self.log.debug("Setting thumbnail for folder \"{}\" <{}>".format( + folder_path, version_id )) op_session = OperationsSession() diff --git a/client/ayon_core/plugins/publish/integrate_version_attrs.py b/client/ayon_core/plugins/publish/integrate_version_attrs.py index 5b5ec9cf5b..bc09af9db0 100644 --- a/client/ayon_core/plugins/publish/integrate_version_attrs.py +++ b/client/ayon_core/plugins/publish/integrate_version_attrs.py @@ -82,6 +82,6 @@ class IntegrateVersionAttributes(pyblish.api.ContextPlugin): return ( instance.data.get("label") or instance.data.get("name") - or instance.data.get("subset") + or instance.data.get("productName") or str(instance) ) diff --git a/client/ayon_core/plugins/publish/preintegrate_thumbnail_representation.py 
b/client/ayon_core/plugins/publish/preintegrate_thumbnail_representation.py index fc60948139..8bd67c0183 100644 --- a/client/ayon_core/plugins/publish/preintegrate_thumbnail_representation.py +++ b/client/ayon_core/plugins/publish/preintegrate_thumbnail_representation.py @@ -37,8 +37,8 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin): if not thumbnail_repres: return - family = instance.data["family"] - subset_name = instance.data["subset"] + product_type = instance.data["productType"] + product_name = instance.data["productName"] host_name = instance.context.data["hostName"] anatomy_data = instance.data["anatomyData"] @@ -50,8 +50,8 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin): "hosts": host_name, "task_names": task.get("name"), "task_types": task.get("type"), - "families": family, - "subsets": subset_name, + "product_types": product_type, + "product_names": product_name, }, logger=self.log ) diff --git a/client/ayon_core/plugins/publish/validate_editorial_asset_name.py b/client/ayon_core/plugins/publish/validate_editorial_asset_name.py index d40263d7f3..dd1a19f602 100644 --- a/client/ayon_core/plugins/publish/validate_editorial_asset_name.py +++ b/client/ayon_core/plugins/publish/validate_editorial_asset_name.py @@ -113,7 +113,7 @@ class ValidateEditorialAssetName(pyblish.api.ContextPlugin): def get_parents(self, context): return_dict = {} for instance in context: - asset = instance.data["asset"] + asset = instance.data["folderPath"] families = instance.data.get("families", []) + [ instance.data["family"] ] diff --git a/client/ayon_core/plugins/publish/validate_publish_dir.py b/client/ayon_core/plugins/publish/validate_publish_dir.py index 5827774cca..f89a7c6810 100644 --- a/client/ayon_core/plugins/publish/validate_publish_dir.py +++ b/client/ayon_core/plugins/publish/validate_publish_dir.py @@ -21,7 +21,7 @@ class ValidatePublishDir(pyblish.api.InstancePlugin): checked_template_names = ["source"] # validate instances might have interim family, needs to be mapped to final - family_mapping = { + product_type_mapping = { "renderLayer": "render", "renderLocal": "render" } @@ -39,7 +39,7 @@ class ValidatePublishDir(pyblish.api.InstancePlugin): self, "Instance meant for in place publishing." " Its 'originalDirname' must be collected." - " Contact OP developer to modify collector." + " Contact AYON developer to modify collector." 
) anatomy = instance.context.data["anatomy"] @@ -62,15 +62,17 @@ class ValidatePublishDir(pyblish.api.InstancePlugin): """Find template which will be used during integration.""" project_name = instance.context.data["projectName"] host_name = instance.context.data["hostName"] + product_type = instance.data["productType"] + mapped_product_type = ( + self.product_type_mapping.get(product_type) or product_type + ) anatomy_data = instance.data["anatomyData"] - family = anatomy_data["family"] - family = self.family_mapping.get(family) or family task_info = anatomy_data.get("task") or {} return get_publish_template_name( project_name, host_name, - family, + mapped_product_type, task_name=task_info.get("name"), task_type=task_info.get("type"), project_settings=instance.context.data["project_settings"], diff --git a/client/ayon_core/plugins/publish/validate_unique_subsets.py b/client/ayon_core/plugins/publish/validate_unique_subsets.py index 75d12f8e01..3144675c50 100644 --- a/client/ayon_core/plugins/publish/validate_unique_subsets.py +++ b/client/ayon_core/plugins/publish/validate_unique_subsets.py @@ -6,17 +6,17 @@ from ayon_core.pipeline.publish import ( class ValidateSubsetUniqueness(pyblish.api.ContextPlugin): - """Validate all subset names are unique. + """Validate all product names are unique. This only validates whether the instances currently set to publish from - the workfile overlap one another for the asset + subset they are publishing + the workfile overlap one another for the asset + product they are publishing to. This does not perform any check against existing publishes in the database - since it is allowed to publish into existing subsets resulting in + since it is allowed to publish into existing products resulting in versioning. - A subset may appear twice to publish from the workfile if one + A product may appear twice to publish from the workfile if one of them is set to publish to another asset than the other. 
""" @@ -27,8 +27,8 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin): def process(self, context): - # Find instance per (asset,subset) - instance_per_asset_subset = defaultdict(list) + # Find instance per (asset,product) + instance_per_asset_product = defaultdict(list) for instance in context: # Ignore disabled instances @@ -36,36 +36,36 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin): continue # Ignore instance without asset data - asset = instance.data.get("asset") + asset = instance.data.get("folderPath") if asset is None: self.log.warning("Instance found without `asset` data: " "{}".format(instance.name)) continue - # Ignore instance without subset data - subset = instance.data.get("subset") - if subset is None: - self.log.warning("Instance found without `subset` data: " - "{}".format(instance.name)) + # Ignore instance without product data + product_name = instance.data.get("productName") + if product_name is None: + self.log.warning(( + "Instance found without `productName` in data: {}" + ).format(instance.name)) continue - instance_per_asset_subset[(asset, subset)].append(instance) + instance_per_asset_product[(asset, product_name)].append(instance) non_unique = [] - for (asset, subset), instances in instance_per_asset_subset.items(): + for (asset, product_name), instances in instance_per_asset_product.items(): - # A single instance per asset, subset is fine + # A single instance per asset, product is fine if len(instances) < 2: continue - non_unique.append("{asset} > {subset}".format(asset=asset, - subset=subset)) + non_unique.append("{} > {}".format(asset, product_name)) if not non_unique: # All is ok return - msg = ("Instance subset names {} are not unique. ".format(non_unique) + + msg = ("Instance product names {} are not unique. 
".format(non_unique) + "Please remove or rename duplicates.") formatting_data = { "non_unique": ",".join(non_unique) diff --git a/client/ayon_core/resources/app_icons/openrv.png b/client/ayon_core/resources/app_icons/openrv.png new file mode 100644 index 0000000000..30077b38e8 Binary files /dev/null and b/client/ayon_core/resources/app_icons/openrv.png differ diff --git a/client/ayon_core/scripts/non_python_host_launch.py b/client/ayon_core/scripts/non_python_host_launch.py index 97632e98ad..4c18fd0ccc 100644 --- a/client/ayon_core/scripts/non_python_host_launch.py +++ b/client/ayon_core/scripts/non_python_host_launch.py @@ -79,7 +79,7 @@ def main(argv): if after_script_idx is not None: launch_args = sys_args[after_script_idx:] - host_name = os.environ["AVALON_APP"].lower() + host_name = os.environ["AYON_HOST_NAME"].lower() if host_name == "photoshop": # TODO refactor launch logic according to AE from ayon_core.hosts.photoshop.api.lib import main @@ -90,7 +90,7 @@ def main(argv): else: title = "Unknown host name" message = ( - "BUG: Environment variable AVALON_APP contains unknown" + "BUG: Environment variable AYON_HOST_NAME contains unknown" " host name \"{}\"" ).format(host_name) show_error_messagebox(title, message) diff --git a/client/ayon_core/scripts/remote_publish.py b/client/ayon_core/scripts/remote_publish.py deleted file mode 100644 index 7e7bf2493b..0000000000 --- a/client/ayon_core/scripts/remote_publish.py +++ /dev/null @@ -1,12 +0,0 @@ -try: - from ayon_core.lib import Logger - from ayon_core.pipeline.publish.lib import remote_publish -except ImportError as exc: - # Ensure Deadline fails by output an error that contains "Fatal Error:" - raise ImportError("Fatal Error: %s" % exc) - - -if __name__ == "__main__": - # Perform remote publish with thorough error checking - log = Logger.get_logger(__name__) - remote_publish(log) diff --git a/client/ayon_core/settings/__init__.py b/client/ayon_core/settings/__init__.py index 51019ca570..ca76c550b8 100644 --- a/client/ayon_core/settings/__init__.py +++ b/client/ayon_core/settings/__init__.py @@ -1,25 +1,16 @@ -from .constants import ( - SYSTEM_SETTINGS_KEY, - PROJECT_SETTINGS_KEY, -) from .lib import ( - get_general_environments, - get_global_settings, - get_system_settings, + get_ayon_settings, + get_studio_settings, get_project_settings, + get_general_environments, get_current_project_settings, - get_local_settings, ) __all__ = ( - "SYSTEM_SETTINGS_KEY", - "PROJECT_SETTINGS_KEY", - + "get_ayon_settings", + "get_studio_settings", "get_general_environments", - "get_global_settings", - "get_system_settings", "get_project_settings", "get_current_project_settings", - "get_local_settings", ) diff --git a/client/ayon_core/settings/ayon_settings.py b/client/ayon_core/settings/ayon_settings.py deleted file mode 100644 index ed1199d517..0000000000 --- a/client/ayon_core/settings/ayon_settings.py +++ /dev/null @@ -1,1554 +0,0 @@ -"""Helper functionality to convert AYON settings to OpenPype v3 settings. - -The settings are converted, so we can use v3 code with AYON settings. Once -the code of and addon is converted to full AYON addon which expect AYON -settings the conversion function can be removed. - -The conversion is hardcoded -> there is no other way how to achieve the result. 
- -Main entrypoints are functions: -- convert_project_settings - convert settings to project settings -- convert_system_settings - convert settings to system settings -# Both getters cache values -- get_ayon_project_settings - replacement for 'get_project_settings' -- get_ayon_system_settings - replacement for 'get_system_settings' -""" -import os -import collections -import json -import copy -import time - -import six - -from ayon_core.client import get_ayon_server_api_connection - - -def _convert_color(color_value): - if isinstance(color_value, six.string_types): - color_value = color_value.lstrip("#") - color_value_len = len(color_value) - _color_value = [] - for idx in range(color_value_len // 2): - _color_value.append(int(color_value[idx:idx + 2], 16)) - for _ in range(4 - len(_color_value)): - _color_value.append(255) - return _color_value - - if isinstance(color_value, list): - # WARNING R,G,B can be 'int' or 'float' - # - 'float' variant is using 'int' for min: 0 and max: 1 - if len(color_value) == 3: - # Add alpha - color_value.append(255) - else: - # Convert float alha to int - alpha = int(color_value[3] * 255) - if alpha > 255: - alpha = 255 - elif alpha < 0: - alpha = 0 - color_value[3] = alpha - return color_value - - -def _convert_host_imageio(host_settings): - if "imageio" not in host_settings: - return - - # --- imageio --- - ayon_imageio = host_settings["imageio"] - # TODO remove when fixed on server - if "ocio_config" in ayon_imageio["ocio_config"]: - ayon_imageio["ocio_config"]["filepath"] = ( - ayon_imageio["ocio_config"].pop("ocio_config") - ) - # Convert file rules - imageio_file_rules = ayon_imageio["file_rules"] - new_rules = {} - for rule in imageio_file_rules["rules"]: - name = rule.pop("name") - new_rules[name] = rule - imageio_file_rules["rules"] = new_rules - - -def _convert_applications_groups(groups, clear_metadata): - environment_key = "environment" - if isinstance(groups, dict): - new_groups = [] - for name, item in groups.items(): - item["name"] = name - new_groups.append(item) - groups = new_groups - - output = {} - group_dynamic_labels = {} - for group in groups: - group_name = group.pop("name") - if "label" in group: - group_dynamic_labels[group_name] = group["label"] - - tool_group_envs = group[environment_key] - if isinstance(tool_group_envs, six.string_types): - group[environment_key] = json.loads(tool_group_envs) - - variants = {} - variant_dynamic_labels = {} - for variant in group.pop("variants"): - variant_name = variant.pop("name") - label = variant.get("label") - if label and label != variant_name: - variant_dynamic_labels[variant_name] = label - variant_envs = variant[environment_key] - if isinstance(variant_envs, six.string_types): - variant[environment_key] = json.loads(variant_envs) - variants[variant_name] = variant - group["variants"] = variants - - if not clear_metadata: - variants["__dynamic_keys_labels__"] = variant_dynamic_labels - output[group_name] = group - - if not clear_metadata: - output["__dynamic_keys_labels__"] = group_dynamic_labels - return output - - -def _convert_applications_system_settings( - ayon_settings, output, clear_metadata -): - # Addon settings - addon_settings = ayon_settings["applications"] - - # Remove project settings - addon_settings.pop("only_available", None) - - # Applications settings - ayon_apps = addon_settings["applications"] - - additional_apps = ayon_apps.pop("additional_apps") - applications = _convert_applications_groups( - ayon_apps, clear_metadata - ) - applications["additional_apps"] = 
_convert_applications_groups( - additional_apps, clear_metadata - ) - - # Tools settings - tools = _convert_applications_groups( - addon_settings["tool_groups"], clear_metadata - ) - - output["applications"] = applications - output["tools"] = {"tool_groups": tools} - - -def _convert_general(ayon_settings, output, default_settings): - # TODO get studio name/code - core_settings = ayon_settings["core"] - environments = core_settings["environments"] - if isinstance(environments, six.string_types): - environments = json.loads(environments) - - general = default_settings["general"] - general.update({ - "log_to_server": False, - "studio_name": core_settings["studio_name"], - "studio_code": core_settings["studio_code"], - "environment": environments - }) - output["general"] = general - - -def _convert_kitsu_system_settings( - ayon_settings, output, addon_versions, default_settings -): - enabled = addon_versions.get("kitsu") is not None - kitsu_settings = default_settings["modules"]["kitsu"] - kitsu_settings["enabled"] = enabled - if enabled: - kitsu_settings["server"] = ayon_settings["kitsu"]["server"] - output["modules"]["kitsu"] = kitsu_settings - - -def _convert_timers_manager_system_settings( - ayon_settings, output, addon_versions, default_settings -): - enabled = addon_versions.get("timers_manager") is not None - manager_settings = default_settings["modules"]["timers_manager"] - manager_settings["enabled"] = enabled - if enabled: - ayon_manager = ayon_settings["timers_manager"] - manager_settings.update({ - key: ayon_manager[key] - for key in { - "auto_stop", - "full_time", - "message_time", - "disregard_publishing" - } - }) - output["modules"]["timers_manager"] = manager_settings - - -def _convert_clockify_system_settings( - ayon_settings, output, addon_versions, default_settings -): - enabled = addon_versions.get("clockify") is not None - clockify_settings = default_settings["modules"]["clockify"] - clockify_settings["enabled"] = enabled - if enabled: - clockify_settings["workspace_name"] = ( - ayon_settings["clockify"]["workspace_name"] - ) - output["modules"]["clockify"] = clockify_settings - - -def _convert_deadline_system_settings( - ayon_settings, output, addon_versions, default_settings -): - enabled = addon_versions.get("deadline") is not None - deadline_settings = default_settings["modules"]["deadline"] - deadline_settings["enabled"] = enabled - if enabled: - ayon_deadline = ayon_settings["deadline"] - deadline_settings["deadline_urls"] = { - item["name"]: item["value"] - for item in ayon_deadline["deadline_urls"] - } - - output["modules"]["deadline"] = deadline_settings - - -def _convert_royalrender_system_settings( - ayon_settings, output, addon_versions, default_settings -): - enabled = addon_versions.get("royalrender") is not None - rr_settings = default_settings["modules"]["royalrender"] - rr_settings["enabled"] = enabled - if enabled: - ayon_royalrender = ayon_settings["royalrender"] - rr_settings["rr_paths"] = { - item["name"]: item["value"] - for item in ayon_royalrender["rr_paths"] - } - output["modules"]["royalrender"] = rr_settings - - -def _convert_modules_system( - ayon_settings, output, addon_versions, default_settings -): - # TODO add all modules - # TODO add 'enabled' values - for func in ( - _convert_kitsu_system_settings, - _convert_timers_manager_system_settings, - _convert_clockify_system_settings, - _convert_deadline_system_settings, - _convert_royalrender_system_settings, - ): - func(ayon_settings, output, addon_versions, default_settings) - - 
modules_settings = output["modules"] - for module_name in ( - "sync_server", - "log_viewer", - "standalonepublish_tool", - "project_manager", - "job_queue", - "avalon", - "addon_paths", - ): - settings = default_settings["modules"][module_name] - if "enabled" in settings: - settings["enabled"] = False - modules_settings[module_name] = settings - - for key, value in ayon_settings.items(): - if key not in output: - output[key] = value - - # Make sure addons have access to settings in initialization - # - AddonsManager passes only modules settings into initialization - if key not in modules_settings: - modules_settings[key] = value - - -def is_dev_mode_enabled(): - """Dev mode is enabled in AYON. - - Returns: - bool: True if dev mode is enabled. - """ - - return os.getenv("AYON_USE_DEV") == "1" - - -def convert_system_settings(ayon_settings, default_settings, addon_versions): - default_settings = copy.deepcopy(default_settings) - output = { - "modules": {} - } - if "applications" in ayon_settings: - _convert_applications_system_settings(ayon_settings, output, False) - - if "core" in ayon_settings: - _convert_general(ayon_settings, output, default_settings) - - for key, value in ayon_settings.items(): - if key not in output: - output[key] = value - - for key, value in default_settings.items(): - if key not in output: - output[key] = value - - _convert_modules_system( - ayon_settings, - output, - addon_versions, - default_settings - ) - return output - - -# --------- Project settings --------- -def _convert_applications_project_settings(ayon_settings, output): - if "applications" not in ayon_settings: - return - - output["applications"] = { - "only_available": ayon_settings["applications"]["only_available"] - } - - -def _convert_blender_project_settings(ayon_settings, output): - if "blender" not in ayon_settings: - return - ayon_blender = ayon_settings["blender"] - _convert_host_imageio(ayon_blender) - - ayon_publish = ayon_blender["publish"] - - for plugin in ("ExtractThumbnail", "ExtractPlayblast"): - plugin_settings = ayon_publish[plugin] - plugin_settings["presets"] = json.loads(plugin_settings["presets"]) - - output["blender"] = ayon_blender - - -def _convert_celaction_project_settings(ayon_settings, output): - if "celaction" not in ayon_settings: - return - - ayon_celaction = ayon_settings["celaction"] - _convert_host_imageio(ayon_celaction) - - output["celaction"] = ayon_celaction - - -def _convert_flame_project_settings(ayon_settings, output): - if "flame" not in ayon_settings: - return - - ayon_flame = ayon_settings["flame"] - - ayon_publish_flame = ayon_flame["publish"] - # Plugin 'ExtractSubsetResources' renamed to 'ExtractProductResources' - if "ExtractSubsetResources" in ayon_publish_flame: - ayon_product_resources = ayon_publish_flame["ExtractSubsetResources"] - else: - ayon_product_resources = ( - ayon_publish_flame.pop("ExtractProductResources")) - ayon_publish_flame["ExtractSubsetResources"] = ayon_product_resources - - # 'ExtractSubsetResources' changed model of 'export_presets_mapping' - # - some keys were moved under 'other_parameters' - new_subset_resources = {} - for item in ayon_product_resources.pop("export_presets_mapping"): - name = item.pop("name") - if "other_parameters" in item: - other_parameters = item.pop("other_parameters") - item.update(other_parameters) - new_subset_resources[name] = item - - ayon_product_resources["export_presets_mapping"] = new_subset_resources - - # 'imageio' changed model - # - missing subkey 'project' which is in root of 'imageio' model - 
_convert_host_imageio(ayon_flame) - ayon_imageio_flame = ayon_flame["imageio"] - if "project" not in ayon_imageio_flame: - profile_mapping = ayon_imageio_flame.pop("profilesMapping") - ayon_flame["imageio"] = { - "project": ayon_imageio_flame, - "profilesMapping": profile_mapping - } - - ayon_load_flame = ayon_flame["load"] - for plugin_name in ("LoadClip", "LoadClipBatch"): - plugin_settings = ayon_load_flame[plugin_name] - plugin_settings["families"] = plugin_settings.pop("product_types") - plugin_settings["clip_name_template"] = ( - plugin_settings["clip_name_template"] - .replace("{folder[name]}", "{asset}") - .replace("{product[name]}", "{subset}") - ) - plugin_settings["layer_rename_template"] = ( - plugin_settings["layer_rename_template"] - .replace("{folder[name]}", "{asset}") - .replace("{product[name]}", "{subset}") - ) - - output["flame"] = ayon_flame - - -def _convert_fusion_project_settings(ayon_settings, output): - if "fusion" not in ayon_settings: - return - - ayon_fusion = ayon_settings["fusion"] - _convert_host_imageio(ayon_fusion) - - ayon_imageio_fusion = ayon_fusion["imageio"] - - if "ocioSettings" in ayon_imageio_fusion: - ayon_ocio_setting = ayon_imageio_fusion.pop("ocioSettings") - paths = ayon_ocio_setting.pop("ocioPathModel") - for key, value in tuple(paths.items()): - new_value = [] - if value: - new_value.append(value) - paths[key] = new_value - - ayon_ocio_setting["configFilePath"] = paths - ayon_imageio_fusion["ocio"] = ayon_ocio_setting - elif "ocio" in ayon_imageio_fusion: - paths = ayon_imageio_fusion["ocio"].pop("configFilePath") - for key, value in tuple(paths.items()): - new_value = [] - if value: - new_value.append(value) - paths[key] = new_value - ayon_imageio_fusion["ocio"]["configFilePath"] = paths - - _convert_host_imageio(ayon_imageio_fusion) - - ayon_create_saver = ayon_fusion["create"]["CreateSaver"] - ayon_create_saver["temp_rendering_path_template"] = ( - ayon_create_saver["temp_rendering_path_template"] - .replace("{product[name]}", "{subset}") - .replace("{product[type]}", "{family}") - .replace("{folder[name]}", "{asset}") - .replace("{task[name]}", "{task}") - ) - - output["fusion"] = ayon_fusion - - -def _convert_maya_project_settings(ayon_settings, output): - if "maya" not in ayon_settings: - return - - ayon_maya = ayon_settings["maya"] - - # Change key of render settings - ayon_maya["RenderSettings"] = ayon_maya.pop("render_settings") - - # Convert extensions mapping - ayon_maya["ext_mapping"] = { - item["name"]: item["value"] - for item in ayon_maya["ext_mapping"] - } - - # Maya dirmap - ayon_maya_dirmap = ayon_maya.pop("maya_dirmap") - ayon_maya_dirmap_path = ayon_maya_dirmap["paths"] - ayon_maya_dirmap_path["source-path"] = ( - ayon_maya_dirmap_path.pop("source_path") - ) - ayon_maya_dirmap_path["destination-path"] = ( - ayon_maya_dirmap_path.pop("destination_path") - ) - ayon_maya["maya-dirmap"] = ayon_maya_dirmap - - # Create plugins - ayon_create = ayon_maya["create"] - ayon_create_static_mesh = ayon_create["CreateUnrealStaticMesh"] - if "static_mesh_prefixes" in ayon_create_static_mesh: - ayon_create_static_mesh["static_mesh_prefix"] = ( - ayon_create_static_mesh.pop("static_mesh_prefixes") - ) - - # --- Publish (START) --- - ayon_publish = ayon_maya["publish"] - try: - attributes = json.loads( - ayon_publish["ValidateAttributes"]["attributes"] - ) - except ValueError: - attributes = {} - ayon_publish["ValidateAttributes"]["attributes"] = attributes - - try: - SUFFIX_NAMING_TABLE = json.loads( - ayon_publish - 
["ValidateTransformNamingSuffix"] - ["SUFFIX_NAMING_TABLE"] - ) - except ValueError: - SUFFIX_NAMING_TABLE = {} - ayon_publish["ValidateTransformNamingSuffix"]["SUFFIX_NAMING_TABLE"] = ( - SUFFIX_NAMING_TABLE - ) - - validate_frame_range = ayon_publish["ValidateFrameRange"] - if "exclude_product_types" in validate_frame_range: - validate_frame_range["exclude_families"] = ( - validate_frame_range.pop("exclude_product_types")) - - # Extract playblast capture settings - validate_rendern_settings = ayon_publish["ValidateRenderSettings"] - for key in ( - "arnold_render_attributes", - "vray_render_attributes", - "redshift_render_attributes", - "renderman_render_attributes", - ): - if key not in validate_rendern_settings: - continue - validate_rendern_settings[key] = [ - [item["type"], item["value"]] - for item in validate_rendern_settings[key] - ] - - plugin_path_attributes = ayon_publish["ValidatePluginPathAttributes"] - plugin_path_attributes["attribute"] = { - item["name"]: item["value"] - for item in plugin_path_attributes["attribute"] - } - - ayon_capture_preset = ayon_publish["ExtractPlayblast"]["capture_preset"] - display_options = ayon_capture_preset["DisplayOptions"] - for key in ("background", "backgroundBottom", "backgroundTop"): - display_options[key] = _convert_color(display_options[key]) - - for src_key, dst_key in ( - ("DisplayOptions", "Display Options"), - ("ViewportOptions", "Viewport Options"), - ("CameraOptions", "Camera Options"), - ): - ayon_capture_preset[dst_key] = ayon_capture_preset.pop(src_key) - - viewport_options = ayon_capture_preset["Viewport Options"] - viewport_options["pluginObjects"] = { - item["name"]: item["value"] - for item in viewport_options["pluginObjects"] - } - - ayon_playblast_settings = ayon_publish["ExtractPlayblast"]["profiles"] - if ayon_playblast_settings: - for setting in ayon_playblast_settings: - capture_preset = setting["capture_preset"] - display_options = capture_preset["DisplayOptions"] - for key in ("background", "backgroundBottom", "backgroundTop"): - display_options[key] = _convert_color(display_options[key]) - - for src_key, dst_key in ( - ("DisplayOptions", "Display Options"), - ("ViewportOptions", "Viewport Options"), - ("CameraOptions", "Camera Options"), - ): - capture_preset[dst_key] = capture_preset.pop(src_key) - - viewport_options = capture_preset["Viewport Options"] - viewport_options["pluginObjects"] = { - item["name"]: item["value"] - for item in viewport_options["pluginObjects"] - } - - # Extract Camera Alembic bake attributes - try: - bake_attributes = json.loads( - ayon_publish["ExtractCameraAlembic"]["bake_attributes"] - ) - except ValueError: - bake_attributes = [] - ayon_publish["ExtractCameraAlembic"]["bake_attributes"] = bake_attributes - - # --- Publish (END) --- - for renderer_settings in ayon_maya["RenderSettings"].values(): - if ( - not isinstance(renderer_settings, dict) - or "additional_options" not in renderer_settings - ): - continue - renderer_settings["additional_options"] = [ - [item["attribute"], item["value"]] - for item in renderer_settings["additional_options"] - ] - - # Workfile build - ayon_workfile_build = ayon_maya["workfile_build"] - for item in ayon_workfile_build["profiles"]: - for key in ("current_context", "linked_assets"): - for subitem in item[key]: - if "families" in subitem: - break - subitem["families"] = subitem.pop("product_types") - subitem["subset_name_filters"] = subitem.pop( - "product_name_filters") - - _convert_host_imageio(ayon_maya) - - ayon_maya_load = ayon_maya["load"] - 
load_colors = ayon_maya_load["colors"] - for key, color in tuple(load_colors.items()): - load_colors[key] = _convert_color(color) - - reference_loader = ayon_maya_load["reference_loader"] - reference_loader["namespace"] = ( - reference_loader["namespace"] - .replace("{product[name]}", "{subset}") - ) - - if ayon_maya_load.get("import_loader"): - import_loader = ayon_maya_load["import_loader"] - import_loader["namespace"] = ( - import_loader["namespace"] - .replace("{product[name]}", "{subset}") - ) - - output["maya"] = ayon_maya - - -def _convert_3dsmax_project_settings(ayon_settings, output): - if "max" not in ayon_settings: - return - - ayon_max = ayon_settings["max"] - _convert_host_imageio(ayon_max) - if "PointCloud" in ayon_max: - point_cloud_attribute = ayon_max["PointCloud"]["attribute"] - new_point_cloud_attribute = { - item["name"]: item["value"] - for item in point_cloud_attribute - } - ayon_max["PointCloud"]["attribute"] = new_point_cloud_attribute - # --- Publish (START) --- - ayon_publish = ayon_max["publish"] - if "ValidateAttributes" in ayon_publish: - try: - attributes = json.loads( - ayon_publish["ValidateAttributes"]["attributes"] - ) - except ValueError: - attributes = {} - ayon_publish["ValidateAttributes"]["attributes"] = attributes - - if "ValidateLoadedPlugin" in ayon_publish: - loaded_plugin = ( - ayon_publish["ValidateLoadedPlugin"]["family_plugins_mapping"] - ) - for item in loaded_plugin: - item["families"] = item.pop("product_types") - - output["max"] = ayon_max - - -def _convert_nuke_knobs(knobs): - new_knobs = [] - for knob in knobs: - knob_type = knob["type"] - - if knob_type == "boolean": - knob_type = "bool" - - if knob_type != "bool": - value = knob[knob_type] - elif knob_type in knob: - value = knob[knob_type] - else: - value = knob["boolean"] - - new_knob = { - "type": knob_type, - "name": knob["name"], - } - new_knobs.append(new_knob) - - if knob_type == "formatable": - new_knob["template"] = value["template"] - new_knob["to_type"] = value["to_type"] - continue - - value_key = "value" - if knob_type == "expression": - value_key = "expression" - - elif knob_type == "color_gui": - value = _convert_color(value) - - elif knob_type == "vector_2d": - value = [value["x"], value["y"]] - - elif knob_type == "vector_3d": - value = [value["x"], value["y"], value["z"]] - - elif knob_type == "box": - value = [value["x"], value["y"], value["r"], value["t"]] - - new_knob[value_key] = value - return new_knobs - - -def _convert_nuke_project_settings(ayon_settings, output): - if "nuke" not in ayon_settings: - return - - ayon_nuke = ayon_settings["nuke"] - - # --- Dirmap --- - dirmap = ayon_nuke.pop("dirmap") - for src_key, dst_key in ( - ("source_path", "source-path"), - ("destination_path", "destination-path"), - ): - dirmap["paths"][dst_key] = dirmap["paths"].pop(src_key) - ayon_nuke["nuke-dirmap"] = dirmap - - # --- Load --- - ayon_load = ayon_nuke["load"] - ayon_load["LoadClip"]["_representations"] = ( - ayon_load["LoadClip"].pop("representations_include") - ) - ayon_load["LoadImage"]["_representations"] = ( - ayon_load["LoadImage"].pop("representations_include") - ) - - # --- Create --- - ayon_create = ayon_nuke["create"] - for creator_name in ( - "CreateWritePrerender", - "CreateWriteImage", - "CreateWriteRender", - ): - create_plugin_settings = ayon_create[creator_name] - create_plugin_settings["temp_rendering_path_template"] = ( - create_plugin_settings["temp_rendering_path_template"] - .replace("{product[name]}", "{subset}") - .replace("{product[type]}", 
"{family}") - .replace("{task[name]}", "{task}") - .replace("{folder[name]}", "{asset}") - ) - new_prenodes = {} - for prenode in create_plugin_settings["prenodes"]: - name = prenode.pop("name") - prenode["knobs"] = _convert_nuke_knobs(prenode["knobs"]) - new_prenodes[name] = prenode - - create_plugin_settings["prenodes"] = new_prenodes - - # --- Publish --- - ayon_publish = ayon_nuke["publish"] - slate_mapping = ayon_publish["ExtractSlateFrame"]["key_value_mapping"] - for key in tuple(slate_mapping.keys()): - value = slate_mapping[key] - slate_mapping[key] = [value["enabled"], value["template"]] - - ayon_publish["ValidateKnobs"]["knobs"] = json.loads( - ayon_publish["ValidateKnobs"]["knobs"] - ) - - new_review_data_outputs = {} - outputs_settings = [] - # Check deprecated ExtractReviewDataMov - # settings for backwards compatibility - deprecrated_review_settings = ayon_publish["ExtractReviewDataMov"] - current_review_settings = ( - ayon_publish.get("ExtractReviewIntermediates") - ) - if deprecrated_review_settings["enabled"]: - outputs_settings = deprecrated_review_settings["outputs"] - elif current_review_settings is None: - pass - elif current_review_settings["enabled"]: - outputs_settings = current_review_settings["outputs"] - - for item in outputs_settings: - item_filter = item["filter"] - if "product_names" in item_filter: - item_filter["subsets"] = item_filter.pop("product_names") - item_filter["families"] = item_filter.pop("product_types") - - reformat_nodes_config = item.get("reformat_nodes_config") or {} - reposition_nodes = reformat_nodes_config.get( - "reposition_nodes") or [] - - for reposition_node in reposition_nodes: - if "knobs" not in reposition_node: - continue - reposition_node["knobs"] = _convert_nuke_knobs( - reposition_node["knobs"] - ) - - name = item.pop("name") - new_review_data_outputs[name] = item - - if deprecrated_review_settings["enabled"]: - deprecrated_review_settings["outputs"] = new_review_data_outputs - elif current_review_settings["enabled"]: - current_review_settings["outputs"] = new_review_data_outputs - - collect_instance_data = ayon_publish["CollectInstanceData"] - if "sync_workfile_version_on_product_types" in collect_instance_data: - collect_instance_data["sync_workfile_version_on_families"] = ( - collect_instance_data.pop( - "sync_workfile_version_on_product_types")) - - # --- ImageIO --- - # NOTE 'monitorOutLut' is maybe not yet in v3 (ut should be) - _convert_host_imageio(ayon_nuke) - ayon_imageio = ayon_nuke["imageio"] - - # workfile - imageio_workfile = ayon_imageio["workfile"] - workfile_keys_mapping = ( - ("color_management", "colorManagement"), - ("native_ocio_config", "OCIO_config"), - ("working_space", "workingSpaceLUT"), - ("thumbnail_space", "monitorLut"), - ) - for src, dst in workfile_keys_mapping: - if ( - src in imageio_workfile - and dst not in imageio_workfile - ): - imageio_workfile[dst] = imageio_workfile.pop(src) - - # regex inputs - if "regex_inputs" in ayon_imageio: - ayon_imageio["regexInputs"] = ayon_imageio.pop("regex_inputs") - - # nodes - ayon_imageio_nodes = ayon_imageio["nodes"] - if "required_nodes" in ayon_imageio_nodes: - ayon_imageio_nodes["requiredNodes"] = ( - ayon_imageio_nodes.pop("required_nodes")) - if "override_nodes" in ayon_imageio_nodes: - ayon_imageio_nodes["overrideNodes"] = ( - ayon_imageio_nodes.pop("override_nodes")) - - for item in ayon_imageio_nodes["requiredNodes"]: - if "nuke_node_class" in item: - item["nukeNodeClass"] = item.pop("nuke_node_class") - item["knobs"] = 
_convert_nuke_knobs(item["knobs"]) - - for item in ayon_imageio_nodes["overrideNodes"]: - if "nuke_node_class" in item: - item["nukeNodeClass"] = item.pop("nuke_node_class") - item["knobs"] = _convert_nuke_knobs(item["knobs"]) - - output["nuke"] = ayon_nuke - - -def _convert_hiero_project_settings(ayon_settings, output): - if "hiero" not in ayon_settings: - return - - ayon_hiero = ayon_settings["hiero"] - _convert_host_imageio(ayon_hiero) - - new_gui_filters = {} - for item in ayon_hiero.pop("filters", []): - subvalue = {} - key = item["name"] - for subitem in item["value"]: - subvalue[subitem["name"]] = subitem["value"] - new_gui_filters[key] = subvalue - ayon_hiero["filters"] = new_gui_filters - - ayon_load_clip = ayon_hiero["load"]["LoadClip"] - if "product_types" in ayon_load_clip: - ayon_load_clip["families"] = ayon_load_clip.pop("product_types") - - ayon_load_clip = ayon_hiero["load"]["LoadClip"] - ayon_load_clip["clip_name_template"] = ( - ayon_load_clip["clip_name_template"] - .replace("{folder[name]}", "{asset}") - .replace("{product[name]}", "{subset}") - ) - - output["hiero"] = ayon_hiero - - -def _convert_photoshop_project_settings(ayon_settings, output): - if "photoshop" not in ayon_settings: - return - - ayon_photoshop = ayon_settings["photoshop"] - _convert_host_imageio(ayon_photoshop) - - ayon_publish_photoshop = ayon_photoshop["publish"] - - ayon_colorcoded = ayon_publish_photoshop["CollectColorCodedInstances"] - if "flatten_product_type_template" in ayon_colorcoded: - ayon_colorcoded["flatten_subset_template"] = ( - ayon_colorcoded.pop("flatten_product_type_template")) - - collect_review = ayon_publish_photoshop["CollectReview"] - if "active" in collect_review: - collect_review["publish"] = collect_review.pop("active") - - output["photoshop"] = ayon_photoshop - - -def _convert_substancepainter_project_settings(ayon_settings, output): - if "substancepainter" not in ayon_settings: - return - - ayon_substance_painter = ayon_settings["substancepainter"] - _convert_host_imageio(ayon_substance_painter) - if "shelves" in ayon_substance_painter: - shelves_items = ayon_substance_painter["shelves"] - new_shelves_items = { - item["name"]: item["value"] - for item in shelves_items - } - ayon_substance_painter["shelves"] = new_shelves_items - - output["substancepainter"] = ayon_substance_painter - - -def _convert_tvpaint_project_settings(ayon_settings, output): - if "tvpaint" not in ayon_settings: - return - ayon_tvpaint = ayon_settings["tvpaint"] - - _convert_host_imageio(ayon_tvpaint) - - ayon_publish_settings = ayon_tvpaint["publish"] - for plugin_name in ( - "ValidateProjectSettings", - "ValidateMarks", - "ValidateStartFrame", - "ValidateAssetName", - ): - ayon_value = ayon_publish_settings[plugin_name] - for src_key, dst_key in ( - ("action_enabled", "optional"), - ("action_enable", "active"), - ): - if src_key in ayon_value: - ayon_value[dst_key] = ayon_value.pop(src_key) - - extract_sequence_setting = ayon_publish_settings["ExtractSequence"] - extract_sequence_setting["review_bg"] = _convert_color( - extract_sequence_setting["review_bg"] - ) - - output["tvpaint"] = ayon_tvpaint - - -def _convert_traypublisher_project_settings(ayon_settings, output): - if "traypublisher" not in ayon_settings: - return - - ayon_traypublisher = ayon_settings["traypublisher"] - - _convert_host_imageio(ayon_traypublisher) - - ayon_editorial_simple = ( - ayon_traypublisher["editorial_creators"]["editorial_simple"] - ) - # Subset -> Product type conversion - if "product_type_presets" in 
ayon_editorial_simple: - family_presets = ayon_editorial_simple.pop("product_type_presets") - for item in family_presets: - item["family"] = item.pop("product_type") - ayon_editorial_simple["family_presets"] = family_presets - - if "shot_metadata_creator" in ayon_editorial_simple: - shot_metadata_creator = ayon_editorial_simple.pop( - "shot_metadata_creator" - ) - if isinstance(shot_metadata_creator["clip_name_tokenizer"], dict): - shot_metadata_creator["clip_name_tokenizer"] = [ - {"name": "_sequence_", "regex": "(sc\\d{3})"}, - {"name": "_shot_", "regex": "(sh\\d{3})"}, - ] - ayon_editorial_simple.update(shot_metadata_creator) - - ayon_editorial_simple["clip_name_tokenizer"] = { - item["name"]: item["regex"] - for item in ayon_editorial_simple["clip_name_tokenizer"] - } - - if "shot_subset_creator" in ayon_editorial_simple: - ayon_editorial_simple.update( - ayon_editorial_simple.pop("shot_subset_creator")) - for item in ayon_editorial_simple["shot_hierarchy"]["parents"]: - item["type"] = item.pop("parent_type") - - # Simple creators - ayon_simple_creators = ayon_traypublisher["simple_creators"] - for item in ayon_simple_creators: - if "product_type" not in item: - break - item["family"] = item.pop("product_type") - - shot_add_tasks = ayon_editorial_simple["shot_add_tasks"] - - # TODO: backward compatibility and remove in future - if isinstance(shot_add_tasks, dict): - shot_add_tasks = [] - - # aggregate shot_add_tasks items - new_shot_add_tasks = { - item["name"]: {"type": item["task_type"]} - for item in shot_add_tasks - } - ayon_editorial_simple["shot_add_tasks"] = new_shot_add_tasks - - output["traypublisher"] = ayon_traypublisher - - -def _convert_webpublisher_project_settings(ayon_settings, output): - if "webpublisher" not in ayon_settings: - return - - ayon_webpublisher = ayon_settings["webpublisher"] - _convert_host_imageio(ayon_webpublisher) - - ayon_publish = ayon_webpublisher["publish"] - - ayon_collect_files = ayon_publish["CollectPublishedFiles"] - ayon_collect_files["task_type_to_family"] = { - item["name"]: item["value"] - for item in ayon_collect_files["task_type_to_family"] - } - - output["webpublisher"] = ayon_webpublisher - - -def _convert_deadline_project_settings(ayon_settings, output): - if "deadline" not in ayon_settings: - return - - ayon_deadline = ayon_settings["deadline"] - - for key in ("deadline_urls",): - ayon_deadline.pop(key) - - ayon_deadline_publish = ayon_deadline["publish"] - limit_groups = { - item["name"]: item["value"] - for item in ayon_deadline_publish["NukeSubmitDeadline"]["limit_groups"] - } - ayon_deadline_publish["NukeSubmitDeadline"]["limit_groups"] = limit_groups - - maya_submit = ayon_deadline_publish["MayaSubmitDeadline"] - for json_key in ("jobInfo", "pluginInfo"): - src_text = maya_submit.pop(json_key) - try: - value = json.loads(src_text) - except ValueError: - value = {} - maya_submit[json_key] = value - - nuke_submit = ayon_deadline_publish["NukeSubmitDeadline"] - nuke_submit["env_search_replace_values"] = { - item["name"]: item["value"] - for item in nuke_submit.pop("env_search_replace_values") - } - nuke_submit["limit_groups"] = { - item["name"]: item["value"] for item in nuke_submit.pop("limit_groups") - } - - process_subsetted_job = ayon_deadline_publish["ProcessSubmittedJobOnFarm"] - process_subsetted_job["aov_filter"] = { - item["name"]: item["value"] - for item in process_subsetted_job.pop("aov_filter") - } - - output["deadline"] = ayon_deadline - - -def _convert_royalrender_project_settings(ayon_settings, output): - if 
"royalrender" not in ayon_settings: - return - ayon_royalrender = ayon_settings["royalrender"] - rr_paths = ayon_royalrender.get("selected_rr_paths", []) - - output["royalrender"] = { - "publish": ayon_royalrender["publish"], - "rr_paths": rr_paths, - } - - -def _convert_kitsu_project_settings(ayon_settings, output): - if "kitsu" not in ayon_settings: - return - - ayon_kitsu_settings = ayon_settings["kitsu"] - ayon_kitsu_settings.pop("server") - - integrate_note = ayon_kitsu_settings["publish"]["IntegrateKitsuNote"] - status_change_conditions = integrate_note["status_change_conditions"] - if "product_type_requirements" in status_change_conditions: - status_change_conditions["family_requirements"] = ( - status_change_conditions.pop("product_type_requirements")) - - output["kitsu"] = ayon_kitsu_settings - - -def _convert_shotgrid_project_settings(ayon_settings, output): - if "shotgrid" not in ayon_settings: - return - - ayon_shotgrid = ayon_settings["shotgrid"] - # This means that a different variant of addon is used - if "leecher_backend_url" not in ayon_shotgrid: - return - - for key in { - "leecher_backend_url", - "filter_projects_by_login", - "shotgrid_settings", - "leecher_manager_url", - }: - ayon_shotgrid.pop(key) - - asset_field = ayon_shotgrid["fields"]["asset"] - asset_field["type"] = asset_field.pop("asset_type") - - task_field = ayon_shotgrid["fields"]["task"] - if "task" in task_field: - task_field["step"] = task_field.pop("task") - - output["shotgrid"] = ayon_settings["shotgrid"] - - -def _convert_slack_project_settings(ayon_settings, output): - if "slack" not in ayon_settings: - return - - ayon_slack = ayon_settings["slack"] - ayon_slack.pop("enabled", None) - for profile in ayon_slack["publish"]["CollectSlackFamilies"]["profiles"]: - profile["tasks"] = profile.pop("task_names") - profile["subsets"] = profile.pop("subset_names") - - output["slack"] = ayon_slack - - -def _convert_global_project_settings(ayon_settings, output, default_settings): - if "core" not in ayon_settings: - return - - ayon_core = ayon_settings["core"] - - _convert_host_imageio(ayon_core) - - for key in ( - "environments", - "studio_name", - "studio_code", - ): - ayon_core.pop(key, None) - - # Publish conversion - ayon_publish = ayon_core["publish"] - - ayon_collect_audio = ayon_publish["CollectAudio"] - if "audio_product_name" in ayon_collect_audio: - ayon_collect_audio["audio_subset_name"] = ( - ayon_collect_audio.pop("audio_product_name")) - - for profile in ayon_publish["ExtractReview"]["profiles"]: - if "product_types" in profile: - profile["families"] = profile.pop("product_types") - new_outputs = {} - for output_def in profile.pop("outputs"): - name = output_def.pop("name") - new_outputs[name] = output_def - - output_def_filter = output_def["filter"] - if "product_names" in output_def_filter: - output_def_filter["subsets"] = ( - output_def_filter.pop("product_names")) - - for color_key in ("overscan_color", "bg_color"): - output_def[color_key] = _convert_color(output_def[color_key]) - - letter_box = output_def["letter_box"] - for color_key in ("fill_color", "line_color"): - letter_box[color_key] = _convert_color(letter_box[color_key]) - - if "output_width" in output_def: - output_def["width"] = output_def.pop("output_width") - - if "output_height" in output_def: - output_def["height"] = output_def.pop("output_height") - - profile["outputs"] = new_outputs - - # ExtractThumbnail plugin - ayon_extract_thumbnail = ayon_publish["ExtractThumbnail"] - # fix display and view at oiio defaults - 
ayon_default_oiio = copy.deepcopy( - ayon_extract_thumbnail["oiiotool_defaults"]) - display_and_view = ayon_default_oiio.pop("display_and_view") - ayon_default_oiio["display"] = display_and_view["display"] - ayon_default_oiio["view"] = display_and_view["view"] - ayon_extract_thumbnail["oiiotool_defaults"] = ayon_default_oiio - # fix target size - ayon_default_resize = copy.deepcopy(ayon_extract_thumbnail["target_size"]) - resize = ayon_default_resize.pop("resize") - ayon_default_resize["width"] = resize["width"] - ayon_default_resize["height"] = resize["height"] - ayon_extract_thumbnail["target_size"] = ayon_default_resize - # fix background color - ayon_extract_thumbnail["background_color"] = _convert_color( - ayon_extract_thumbnail["background_color"] - ) - - # ExtractOIIOTranscode plugin - extract_oiio_transcode = ayon_publish["ExtractOIIOTranscode"] - extract_oiio_transcode_profiles = extract_oiio_transcode["profiles"] - for profile in extract_oiio_transcode_profiles: - new_outputs = {} - name_counter = {} - if "product_names" in profile: - profile["subsets"] = profile.pop("product_names") - for profile_output in profile["outputs"]: - if "name" in profile_output: - name = profile_output.pop("name") - else: - # Backwards compatibility for setting without 'name' in model - name = profile_output["extension"] - if name in new_outputs: - name_counter[name] += 1 - name = "{}_{}".format(name, name_counter[name]) - else: - name_counter[name] = 0 - - new_outputs[name] = profile_output - profile["outputs"] = new_outputs - - # Extract Burnin plugin - extract_burnin = ayon_publish["ExtractBurnin"] - extract_burnin_options = extract_burnin["options"] - for color_key in ("font_color", "bg_color"): - extract_burnin_options[color_key] = _convert_color( - extract_burnin_options[color_key] - ) - - for profile in extract_burnin["profiles"]: - extract_burnin_defs = profile["burnins"] - if "product_names" in profile: - profile["subsets"] = profile.pop("product_names") - profile["families"] = profile.pop("product_types") - - for burnin_def in extract_burnin_defs: - for key in ( - "TOP_LEFT", - "TOP_CENTERED", - "TOP_RIGHT", - "BOTTOM_LEFT", - "BOTTOM_CENTERED", - "BOTTOM_RIGHT", - ): - burnin_def[key] = ( - burnin_def[key] - .replace("{product[name]}", "{subset}") - .replace("{Product[name]}", "{Subset}") - .replace("{PRODUCT[NAME]}", "{SUBSET}") - .replace("{product[type]}", "{family}") - .replace("{Product[type]}", "{Family}") - .replace("{PRODUCT[TYPE]}", "{FAMILY}") - .replace("{folder[name]}", "{asset}") - .replace("{Folder[name]}", "{Asset}") - .replace("{FOLDER[NAME]}", "{ASSET}") - ) - profile["burnins"] = { - extract_burnin_def.pop("name"): extract_burnin_def - for extract_burnin_def in extract_burnin_defs - } - - if "IntegrateProductGroup" in ayon_publish: - subset_group = ayon_publish.pop("IntegrateProductGroup") - subset_group_profiles = subset_group.pop("product_grouping_profiles") - for profile in subset_group_profiles: - profile["families"] = profile.pop("product_types") - subset_group["subset_grouping_profiles"] = subset_group_profiles - ayon_publish["IntegrateSubsetGroup"] = subset_group - - # Cleanup plugin - ayon_cleanup = ayon_publish["CleanUp"] - if "patterns" in ayon_cleanup: - ayon_cleanup["paterns"] = ayon_cleanup.pop("patterns") - - # Project root settings - json string to dict - ayon_core["project_environments"] = json.loads( - ayon_core["project_environments"] - ) - ayon_core["project_folder_structure"] = json.dumps(json.loads( - ayon_core["project_folder_structure"] - )) - - # 
Tools settings - ayon_tools = ayon_core["tools"] - ayon_create_tool = ayon_tools["creator"] - if "product_name_profiles" in ayon_create_tool: - product_name_profiles = ayon_create_tool.pop("product_name_profiles") - for profile in product_name_profiles: - profile["families"] = profile.pop("product_types") - ayon_create_tool["subset_name_profiles"] = product_name_profiles - - for profile in ayon_create_tool["subset_name_profiles"]: - template = profile["template"] - profile["template"] = ( - template - .replace("{task[name]}", "{task}") - .replace("{Task[name]}", "{Task}") - .replace("{TASK[NAME]}", "{TASK}") - .replace("{product[type]}", "{family}") - .replace("{Product[type]}", "{Family}") - .replace("{PRODUCT[TYPE]}", "{FAMILY}") - .replace("{folder[name]}", "{asset}") - .replace("{Folder[name]}", "{Asset}") - .replace("{FOLDER[NAME]}", "{ASSET}") - ) - - product_smart_select_key = "families_smart_select" - if "product_types_smart_select" in ayon_create_tool: - product_smart_select_key = "product_types_smart_select" - - new_smart_select_families = { - item["name"]: item["task_names"] - for item in ayon_create_tool.pop(product_smart_select_key) - } - ayon_create_tool["families_smart_select"] = new_smart_select_families - - ayon_loader_tool = ayon_tools["loader"] - if "product_type_filter_profiles" in ayon_loader_tool: - product_type_filter_profiles = ( - ayon_loader_tool.pop("product_type_filter_profiles")) - for profile in product_type_filter_profiles: - profile["filter_families"] = profile.pop("filter_product_types") - - ayon_loader_tool["family_filter_profiles"] = ( - product_type_filter_profiles) - - ayon_publish_tool = ayon_tools["publish"] - for profile in ayon_publish_tool["hero_template_name_profiles"]: - if "product_types" in profile: - profile["families"] = profile.pop("product_types") - - for profile in ayon_publish_tool["template_name_profiles"]: - if "product_types" in profile: - profile["families"] = profile.pop("product_types") - - ayon_core["sync_server"] = ( - default_settings["global"]["sync_server"] - ) - output["global"] = ayon_core - - -def convert_project_settings(ayon_settings, default_settings): - # Missing settings - # - standalonepublisher - default_settings = copy.deepcopy(default_settings) - output = {} - exact_match = { - "aftereffects", - "harmony", - "houdini", - "resolve", - "unreal", - } - for key in exact_match: - if key in ayon_settings: - output[key] = ayon_settings[key] - _convert_host_imageio(output[key]) - - _convert_applications_project_settings(ayon_settings, output) - _convert_blender_project_settings(ayon_settings, output) - _convert_celaction_project_settings(ayon_settings, output) - _convert_flame_project_settings(ayon_settings, output) - _convert_fusion_project_settings(ayon_settings, output) - _convert_maya_project_settings(ayon_settings, output) - _convert_3dsmax_project_settings(ayon_settings, output) - _convert_nuke_project_settings(ayon_settings, output) - _convert_hiero_project_settings(ayon_settings, output) - _convert_photoshop_project_settings(ayon_settings, output) - _convert_substancepainter_project_settings(ayon_settings, output) - _convert_tvpaint_project_settings(ayon_settings, output) - _convert_traypublisher_project_settings(ayon_settings, output) - _convert_webpublisher_project_settings(ayon_settings, output) - - _convert_deadline_project_settings(ayon_settings, output) - _convert_royalrender_project_settings(ayon_settings, output) - _convert_kitsu_project_settings(ayon_settings, output) - 
_convert_shotgrid_project_settings(ayon_settings, output) - _convert_slack_project_settings(ayon_settings, output) - - _convert_global_project_settings(ayon_settings, output, default_settings) - - for key, value in ayon_settings.items(): - if key not in output: - output[key] = value - - for key, value in default_settings.items(): - if key not in output: - output[key] = value - - return output - - -class CacheItem: - lifetime = 10 - - def __init__(self, value, outdate_time=None): - self._value = value - if outdate_time is None: - outdate_time = time.time() + self.lifetime - self._outdate_time = outdate_time - - @classmethod - def create_outdated(cls): - return cls({}, 0) - - def get_value(self): - return copy.deepcopy(self._value) - - def update_value(self, value): - self._value = value - self._outdate_time = time.time() + self.lifetime - - @property - def is_outdated(self): - return time.time() > self._outdate_time - - -class _AyonSettingsCache: - use_bundles = None - variant = None - addon_versions = CacheItem.create_outdated() - studio_settings = CacheItem.create_outdated() - cache_by_project_name = collections.defaultdict( - CacheItem.create_outdated) - - @classmethod - def _use_bundles(cls): - if _AyonSettingsCache.use_bundles is None: - con = get_ayon_server_api_connection() - major, minor, _, _, _ = con.get_server_version_tuple() - use_bundles = True - if (major, minor) < (0, 3): - use_bundles = False - _AyonSettingsCache.use_bundles = use_bundles - return _AyonSettingsCache.use_bundles - - @classmethod - def _get_variant(cls): - if _AyonSettingsCache.variant is None: - from ayon_core.lib import is_staging_enabled - - variant = "production" - if is_dev_mode_enabled(): - variant = cls._get_bundle_name() - elif is_staging_enabled(): - variant = "staging" - - # Cache variant - _AyonSettingsCache.variant = variant - - # Set the variant to global ayon api connection - con = get_ayon_server_api_connection() - con.set_default_settings_variant(variant) - return _AyonSettingsCache.variant - - @classmethod - def _get_bundle_name(cls): - return os.environ["AYON_BUNDLE_NAME"] - - @classmethod - def get_value_by_project(cls, project_name): - cache_item = _AyonSettingsCache.cache_by_project_name[project_name] - if cache_item.is_outdated: - con = get_ayon_server_api_connection() - if cls._use_bundles(): - value = con.get_addons_settings( - bundle_name=cls._get_bundle_name(), - project_name=project_name, - variant=cls._get_variant() - ) - else: - value = con.get_addons_settings(project_name) - cache_item.update_value(value) - return cache_item.get_value() - - @classmethod - def _get_addon_versions_from_bundle(cls): - con = get_ayon_server_api_connection() - expected_bundle = cls._get_bundle_name() - bundles = con.get_bundles()["bundles"] - bundle = next( - ( - bundle - for bundle in bundles - if bundle["name"] == expected_bundle - ), - None - ) - if bundle is not None: - return bundle["addons"] - return {} - - @classmethod - def get_addon_versions(cls): - cache_item = _AyonSettingsCache.addon_versions - if cache_item.is_outdated: - if cls._use_bundles(): - addons = cls._get_addon_versions_from_bundle() - else: - con = get_ayon_server_api_connection() - settings_data = con.get_addons_settings( - only_values=False, - variant=cls._get_variant() - ) - addons = settings_data["versions"] - cache_item.update_value(addons) - - return cache_item.get_value() - - -def get_ayon_project_settings(default_values, project_name): - ayon_settings = _AyonSettingsCache.get_value_by_project(project_name) - return 
convert_project_settings(ayon_settings, default_values) - - -def get_ayon_system_settings(default_values): - addon_versions = _AyonSettingsCache.get_addon_versions() - ayon_settings = _AyonSettingsCache.get_value_by_project(None) - - return convert_system_settings( - ayon_settings, default_values, addon_versions - ) - - -def get_ayon_settings(project_name=None): - """AYON studio settings. - - Raw AYON settings values. - - Args: - project_name (Optional[str]): Project name. - - Returns: - dict[str, Any]: AYON settings. - """ - - return _AyonSettingsCache.get_value_by_project(project_name) diff --git a/client/ayon_core/settings/constants.py b/client/ayon_core/settings/constants.py deleted file mode 100644 index 0db3948b64..0000000000 --- a/client/ayon_core/settings/constants.py +++ /dev/null @@ -1,28 +0,0 @@ -# Metadata keys for work with studio and project overrides -M_OVERRIDDEN_KEY = "__overriden_keys__" -# Metadata key for storing dynamic created labels -M_DYNAMIC_KEY_LABEL = "__dynamic_keys_labels__" - -METADATA_KEYS = frozenset([ - M_OVERRIDDEN_KEY, - M_DYNAMIC_KEY_LABEL -]) - -# Keys where studio's system overrides are stored -SYSTEM_SETTINGS_KEY = "system_settings" -PROJECT_SETTINGS_KEY = "project_settings" - -DEFAULT_PROJECT_KEY = "__default_project__" - - -__all__ = ( - "M_OVERRIDDEN_KEY", - "M_DYNAMIC_KEY_LABEL", - - "METADATA_KEYS", - - "SYSTEM_SETTINGS_KEY", - "PROJECT_SETTINGS_KEY", - - "DEFAULT_PROJECT_KEY", -) diff --git a/client/ayon_core/settings/defaults/project_anatomy/attributes.json b/client/ayon_core/settings/defaults/project_anatomy/attributes.json deleted file mode 100644 index 0cc414fb69..0000000000 --- a/client/ayon_core/settings/defaults/project_anatomy/attributes.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "fps": 25.0, - "frameStart": 1001, - "frameEnd": 1001, - "clipIn": 1, - "clipOut": 1, - "handleStart": 0, - "handleEnd": 0, - "resolutionWidth": 1920, - "resolutionHeight": 1080, - "pixelAspect": 1.0, - "applications": [ - "maya/2020", - "nuke/12-2", - "nukex/12-2", - "hiero/12-2", - "resolve/stable", - "houdini/18-5", - "blender/2-91", - "harmony/20", - "photoshop/2021", - "aftereffects/2021" - ], - "tools_env": [], - "active": true -} diff --git a/client/ayon_core/settings/defaults/project_anatomy/imageio.json b/client/ayon_core/settings/defaults/project_anatomy/imageio.json deleted file mode 100644 index d38d0a0774..0000000000 --- a/client/ayon_core/settings/defaults/project_anatomy/imageio.json +++ /dev/null @@ -1,258 +0,0 @@ -{ - "hiero": { - "workfile": { - "ocioConfigName": "nuke-default", - "ocioconfigpath": { - "windows": [], - "darwin": [], - "linux": [] - }, - "workingSpace": "linear", - "sixteenBitLut": "sRGB", - "eightBitLut": "sRGB", - "floatLut": "linear", - "logLut": "Cineon", - "viewerLut": "sRGB", - "thumbnailLut": "sRGB" - }, - "regexInputs": { - "inputs": [ - { - "regex": "[^-a-zA-Z0-9](plateRef).*(?=mp4)", - "colorspace": "sRGB" - } - ] - } - }, - "nuke": { - "viewer": { - "viewerProcess": "sRGB" - }, - "baking": { - "viewerProcess": "rec709" - }, - "workfile": { - "colorManagement": "Nuke", - "OCIO_config": "nuke-default", - "customOCIOConfigPath": { - "windows": [], - "darwin": [], - "linux": [] - }, - "workingSpaceLUT": "linear", - "monitorLut": "sRGB", - "int8Lut": "sRGB", - "int16Lut": "sRGB", - "logLut": "Cineon", - "floatLut": "linear" - }, - "nodes": { - "requiredNodes": [ - { - "plugins": [ - "CreateWriteRender" - ], - "nukeNodeClass": "Write", - "knobs": [ - { - "type": "text", - "name": "file_type", - "value": "exr" - }, - { - 
"type": "text", - "name": "datatype", - "value": "16 bit half" - }, - { - "type": "text", - "name": "compression", - "value": "Zip (1 scanline)" - }, - { - "type": "bool", - "name": "autocrop", - "value": true - }, - { - "type": "color_gui", - "name": "tile_color", - "value": [ - 186, - 35, - 35, - 255 - ] - }, - { - "type": "text", - "name": "channels", - "value": "rgb" - }, - { - "type": "text", - "name": "colorspace", - "value": "linear" - }, - { - "type": "bool", - "name": "create_directories", - "value": true - } - ] - }, - { - "plugins": [ - "CreateWritePrerender" - ], - "nukeNodeClass": "Write", - "knobs": [ - { - "type": "text", - "name": "file_type", - "value": "exr" - }, - { - "type": "text", - "name": "datatype", - "value": "16 bit half" - }, - { - "type": "text", - "name": "compression", - "value": "Zip (1 scanline)" - }, - { - "type": "bool", - "name": "autocrop", - "value": true - }, - { - "type": "color_gui", - "name": "tile_color", - "value": [ - 171, - 171, - 10, - 255 - ] - }, - { - "type": "text", - "name": "channels", - "value": "rgb" - }, - { - "type": "text", - "name": "colorspace", - "value": "linear" - }, - { - "type": "bool", - "name": "create_directories", - "value": true - } - ] - }, - { - "plugins": [ - "CreateWriteImage" - ], - "nukeNodeClass": "Write", - "knobs": [ - { - "type": "text", - "name": "file_type", - "value": "tiff" - }, - { - "type": "text", - "name": "datatype", - "value": "16 bit" - }, - { - "type": "text", - "name": "compression", - "value": "Deflate" - }, - { - "type": "color_gui", - "name": "tile_color", - "value": [ - 56, - 162, - 7, - 255 - ] - }, - { - "type": "text", - "name": "channels", - "value": "rgb" - }, - { - "type": "text", - "name": "colorspace", - "value": "sRGB" - }, - { - "type": "bool", - "name": "create_directories", - "value": true - } - ] - } - ], - "overrideNodes": [] - }, - "regexInputs": { - "inputs": [ - { - "regex": "(beauty).*(?=.exr)", - "colorspace": "linear" - } - ] - } - }, - "maya": { - "colorManagementPreference_v2": { - "enabled": true, - "configFilePath": { - "windows": [], - "darwin": [], - "linux": [] - }, - "renderSpace": "ACEScg", - "displayName": "sRGB", - "viewName": "ACES 1.0 SDR-video" - }, - "colorManagementPreference": { - "configFilePath": { - "windows": [], - "darwin": [], - "linux": [] - }, - "renderSpace": "scene-linear Rec 709/sRGB", - "viewTransform": "sRGB gamma" - } - }, - "flame": { - "project": { - "colourPolicy": "ACES 1.1", - "frameDepth": "16-bit fp", - "fieldDominance": "PROGRESSIVE" - }, - "profilesMapping": { - "inputs": [ - { - "flameName": "ACEScg", - "ocioName": "ACES - ACEScg" - }, - { - "flameName": "Rec.709 video", - "ocioName": "Output - Rec.709" - } - ] - } - } -} diff --git a/client/ayon_core/settings/defaults/project_anatomy/roots.json b/client/ayon_core/settings/defaults/project_anatomy/roots.json deleted file mode 100644 index 8171d17d56..0000000000 --- a/client/ayon_core/settings/defaults/project_anatomy/roots.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "work": { - "windows": "C:/projects", - "darwin": "/Volumes/path", - "linux": "/mnt/share/projects" - } -} diff --git a/client/ayon_core/settings/defaults/project_anatomy/tasks.json b/client/ayon_core/settings/defaults/project_anatomy/tasks.json deleted file mode 100644 index 135462839f..0000000000 --- a/client/ayon_core/settings/defaults/project_anatomy/tasks.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "Generic": { - "short_name": "gener" - }, - "Art": { - "short_name": "art" - }, - "Modeling": { - "short_name": "mdl" - }, - 
"Texture": { - "short_name": "tex" - }, - "Lookdev": { - "short_name": "look" - }, - "Rigging": { - "short_name": "rig" - }, - "Edit": { - "short_name": "edit" - }, - "Layout": { - "short_name": "lay" - }, - "Setdress": { - "short_name": "dress" - }, - "Animation": { - "short_name": "anim" - }, - "FX": { - "short_name": "fx" - }, - "Lighting": { - "short_name": "lgt" - }, - "Paint": { - "short_name": "paint" - }, - "Compositing": { - "short_name": "comp" - } -} diff --git a/client/ayon_core/settings/defaults/project_anatomy/templates.json b/client/ayon_core/settings/defaults/project_anatomy/templates.json deleted file mode 100644 index 6c3e038d27..0000000000 --- a/client/ayon_core/settings/defaults/project_anatomy/templates.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "defaults": { - "version_padding": 3, - "version": "v{version:0>{@version_padding}}", - "frame_padding": 4, - "frame": "{frame:0>{@frame_padding}}" - }, - "work": { - "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/work/{task[name]}", - "file": "{project[code]}_{asset}_{task[name]}_{@version}<_{comment}>.{ext}", - "path": "{@folder}/{@file}" - }, - "render": { - "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}", - "file": "{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}>.{ext}", - "path": "{@folder}/{@file}" - }, - "publish": { - "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}", - "file": "{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}><_{udim}>.{ext}", - "path": "{@folder}/{@file}", - "thumbnail": "{thumbnail_root}/{project[name]}/{_id}_{thumbnail_type}.{ext}" - }, - "hero": { - "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/hero", - "file": "{project[code]}_{asset}_{subset}_hero<_{output}><.{frame}>.{ext}", - "path": "{@folder}/{@file}" - }, - "delivery": {}, - "unreal": { - "folder": "{root[work]}/{project[name]}/unreal/{task[name]}", - "file": "{project[code]}_{asset}.{ext}", - "path": "{@folder}/{@file}" - }, - "others": { - "maya2unreal": { - "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}", - "file": "{subset}_{@version}<_{output}><.{@frame}>.{ext}", - "path": "{@folder}/{@file}" - }, - "online": { - "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}", - "file": "{originalBasename}<.{@frame}><_{udim}>.{ext}", - "path": "{@folder}/{@file}" - }, - "tycache": { - "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}", - "file": "{originalBasename}.{ext}", - "path": "{@folder}/{@file}" - }, - "source": { - "folder": "{root[work]}/{originalDirname}", - "file": "{originalBasename}.{ext}", - "path": "{@folder}/{@file}" - }, - "transient": { - "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/work/{family}/{subset}" - }, - "__dynamic_keys_labels__": { - "maya2unreal": "Maya to Unreal", - "online": "online", - "tycache": "tycache", - "source": "source", - "transient": "transient" - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/aftereffects.json b/client/ayon_core/settings/defaults/project_settings/aftereffects.json deleted file mode 100644 index 77ccb74410..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/aftereffects.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "imageio": { - "activate_host_color_management": true, - "ocio_config": { - "override_global_config": false, - "filepath": 
[] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - } - }, - "create": { - "RenderCreator": { - "default_variants": [ - "Main" - ], - "mark_for_review": true - } - }, - "publish": { - "CollectReview": { - "enabled": true - }, - "ValidateSceneSettings": { - "enabled": true, - "optional": true, - "active": true, - "skip_resolution_check": [ - ".*" - ], - "skip_timelines_check": [ - ".*" - ] - }, - "ValidateContainers": { - "enabled": true, - "optional": true, - "active": true - } - }, - "workfile_builder": { - "create_first_version": false, - "custom_templates": [] - }, - "templated_workfile_build": { - "profiles": [] - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/applications.json b/client/ayon_core/settings/defaults/project_settings/applications.json deleted file mode 100644 index 62f3cdfe1b..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/applications.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "only_available": false -} diff --git a/client/ayon_core/settings/defaults/project_settings/blender.json b/client/ayon_core/settings/defaults/project_settings/blender.json deleted file mode 100644 index 385e97ef91..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/blender.json +++ /dev/null @@ -1,216 +0,0 @@ -{ - "unit_scale_settings": { - "enabled": true, - "apply_on_opening": false, - "base_file_unit_scale": 0.01 - }, - "set_resolution_startup": true, - "set_frames_startup": true, - "imageio": { - "activate_host_color_management": true, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - } - }, - "RenderSettings": { - "default_render_image_folder": "renders/blender", - "aov_separator": "underscore", - "image_format": "exr", - "multilayer_exr": true, - "aov_list": [], - "custom_passes": [] - }, - "workfile_builder": { - "create_first_version": false, - "custom_templates": [] - }, - "publish": { - "ValidateCameraZeroKeyframe": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateFileSaved": { - "enabled": true, - "optional": false, - "active": true, - "exclude_families": [] - }, - "ValidateRenderCameraIsSet": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateDeadlinePublish": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateMeshHasUvs": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateMeshNoNegativeScale": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateTransformZero": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateNoColonsInName": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateInstanceEmpty": { - "enabled": true, - "optional": false, - "active": true - }, - "ExtractBlend": { - "enabled": true, - "optional": true, - "active": true, - "families": [ - "model", - "camera", - "rig", - "action", - "layout", - "blendScene" - ] - }, - "ExtractFBX": { - "enabled": true, - "optional": true, - "active": false - }, - "ExtractModelABC": { - "enabled": true, - "optional": true, - "active": true - }, - "ExtractBlendAnimation": { - "enabled": true, - "optional": true, - "active": true - }, - "ExtractAnimationFBX": { - "enabled": true, - "optional": true, - "active": false - }, - "ExtractCamera": { - "enabled": true, - "optional": true, - "active": true - }, - "ExtractCameraABC": { - "enabled": true, - "optional": true, - "active": true - }, - "ExtractLayout": { - 
"enabled": true, - "optional": true, - "active": false - }, - "ExtractThumbnail": { - "enabled": true, - "optional": true, - "active": true, - "presets": { - "model": { - "image_settings": { - "file_format": "JPEG", - "color_mode": "RGB", - "quality": 100 - }, - "display_options": { - "shading": { - "light": "STUDIO", - "studio_light": "Default", - "type": "SOLID", - "color_type": "OBJECT", - "show_xray": false, - "show_shadows": false, - "show_cavity": true - }, - "overlay": { - "show_overlays": false - } - } - }, - "rig": { - "image_settings": { - "file_format": "JPEG", - "color_mode": "RGB", - "quality": 100 - }, - "display_options": { - "shading": { - "light": "STUDIO", - "studio_light": "Default", - "type": "SOLID", - "color_type": "OBJECT", - "show_xray": true, - "show_shadows": false, - "show_cavity": false - }, - "overlay": { - "show_overlays": true, - "show_ortho_grid": false, - "show_floor": false, - "show_axis_x": false, - "show_axis_y": false, - "show_axis_z": false, - "show_text": false, - "show_stats": false, - "show_cursor": false, - "show_annotation": false, - "show_extras": false, - "show_relationship_lines": false, - "show_outline_selected": false, - "show_motion_paths": false, - "show_object_origins": false, - "show_bones": true - } - } - } - } - }, - "ExtractPlayblast": { - "enabled": true, - "optional": true, - "active": true, - "presets": { - "default": { - "image_settings": { - "file_format": "PNG", - "color_mode": "RGB", - "color_depth": "8", - "compression": 15 - }, - "display_options": { - "shading": { - "type": "MATERIAL", - "render_pass": "COMBINED" - }, - "overlay": { - "show_overlays": false - } - } - } - } - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/celaction.json b/client/ayon_core/settings/defaults/project_settings/celaction.json deleted file mode 100644 index af56a36649..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/celaction.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "imageio": { - "activate_host_color_management": true, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - } - }, - "workfile": { - "submission_overrides": [ - "render_chunk", - "frame_range", - "resolution" - ] - }, - "publish": { - "CollectRenderPath": { - "output_extension": "png", - "anatomy_template_key_render_files": "render", - "anatomy_template_key_metadata": "render" - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/deadline.json b/client/ayon_core/settings/defaults/project_settings/deadline.json deleted file mode 100644 index b02cfa8207..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/deadline.json +++ /dev/null @@ -1,157 +0,0 @@ -{ - "deadline_servers": [], - "publish": { - "CollectDefaultDeadlineServer": { - "pass_mongo_url": true - }, - "CollectDeadlinePools": { - "primary_pool": "", - "secondary_pool": "" - }, - "ValidateExpectedFiles": { - "enabled": true, - "active": true, - "allow_user_override": true, - "families": [ - "render" - ], - "targets": [ - "deadline" - ] - }, - "MayaSubmitDeadline": { - "enabled": true, - "optional": false, - "active": true, - "tile_assembler_plugin": "DraftTileAssembler", - "use_published": true, - "import_reference": false, - "asset_dependencies": true, - "priority": 50, - "tile_priority": 50, - "group": "none", - "limit": [], - "jobInfo": {}, - "pluginInfo": {}, - "scene_patches": [], - "strict_error_checking": true - }, - "MaxSubmitDeadline": { - "enabled": 
true, - "optional": false, - "active": true, - "use_published": true, - "priority": 50, - "chunk_size": 10, - "group": "none" - }, - "FusionSubmitDeadline": { - "enabled": true, - "optional": false, - "active": true, - "priority": 50, - "chunk_size": 10, - "concurrent_tasks": 1, - "group": "", - "plugin": "Fusion" - }, - "NukeSubmitDeadline": { - "enabled": true, - "optional": false, - "active": true, - "priority": 50, - "chunk_size": 10, - "concurrent_tasks": 1, - "group": "", - "department": "", - "use_gpu": true, - "workfile_dependency": true, - "use_published_workfile": true, - "env_allowed_keys": [], - "env_search_replace_values": {}, - "limit_groups": {} - }, - "HarmonySubmitDeadline": { - "enabled": true, - "optional": false, - "active": true, - "use_published": true, - "priority": 50, - "chunk_size": 10000, - "group": "", - "department": "" - }, - "AfterEffectsSubmitDeadline": { - "enabled": true, - "optional": false, - "active": true, - "use_published": true, - "priority": 50, - "chunk_size": 10000, - "group": "", - "department": "", - "multiprocess": true - }, - "CelactionSubmitDeadline": { - "enabled": true, - "deadline_department": "", - "deadline_priority": 50, - "deadline_pool": "", - "deadline_pool_secondary": "", - "deadline_group": "", - "deadline_chunk_size": 10, - "deadline_job_delay": "00:00:00:00" - }, - "BlenderSubmitDeadline": { - "enabled": true, - "optional": false, - "active": true, - "use_published": true, - "priority": 50, - "chunk_size": 10, - "group": "none", - "job_delay": "00:00:00:00" - }, - "ProcessSubmittedCacheJobOnFarm": { - "enabled": true, - "deadline_department": "", - "deadline_pool": "", - "deadline_group": "", - "deadline_chunk_size": 1, - "deadline_priority": 50 - }, - "ProcessSubmittedJobOnFarm": { - "enabled": true, - "deadline_department": "", - "deadline_pool": "", - "deadline_group": "", - "deadline_chunk_size": 1, - "deadline_priority": 50, - "publishing_script": "", - "skip_integration_repre_list": [], - "aov_filter": { - "maya": [ - ".*([Bb]eauty).*" - ], - "blender": [ - ".*([Bb]eauty).*" - ], - "aftereffects": [ - ".*" - ], - "celaction": [ - ".*" - ], - "harmony": [ - ".*" - ], - "max": [ - ".*" - ], - "fusion": [ - ".*" - ] - } - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/flame.json b/client/ayon_core/settings/defaults/project_settings/flame.json deleted file mode 100644 index 5b4b62c140..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/flame.json +++ /dev/null @@ -1,148 +0,0 @@ -{ - "imageio": { - "activate_host_color_management": true, - "remapping": { - "rules": [] - }, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - }, - "project": { - "colourPolicy": "ACES 1.1", - "frameDepth": "16-bit fp", - "fieldDominance": "PROGRESSIVE" - }, - "profilesMapping": { - "inputs": [ - { - "flameName": "ACEScg", - "ocioName": "ACES - ACEScg" - }, - { - "flameName": "Rec.709 video", - "ocioName": "Output - Rec.709" - } - ] - } - }, - "create": { - "CreateShotClip": { - "hierarchy": "{folder}/{sequence}", - "useShotName": true, - "clipRename": false, - "clipName": "{sequence}{shot}", - "segmentIndex": true, - "countFrom": 10, - "countSteps": 10, - "folder": "shots", - "episode": "ep01", - "sequence": "a", - "track": "{_track_}", - "shot": "####", - "vSyncOn": false, - "workfileFrameStart": 1001, - "handleStart": 5, - "handleEnd": 5, - "includeHandles": false, - "retimedHandles": true, - "retimedFramerange": 
true - } - }, - "publish": { - "CollectTimelineInstances": { - "xml_preset_attrs_from_comments": [ - { - "name": "width", - "type": "number" - }, - { - "name": "height", - "type": "number" - }, - { - "name": "pixelRatio", - "type": "float" - }, - { - "name": "resizeType", - "type": "string" - }, - { - "name": "resizeFilter", - "type": "string" - } - ], - "add_tasks": [ - { - "name": "compositing", - "type": "Compositing", - "create_batch_group": true - } - ] - }, - "ExtractSubsetResources": { - "keep_original_representation": false, - "export_presets_mapping": { - "exr16fpdwaa": { - "active": true, - "export_type": "File Sequence", - "ext": "exr", - "xml_preset_file": "OpenEXR (16-bit fp DWAA).xml", - "colorspace_out": "ACES - ACEScg", - "xml_preset_dir": "", - "parsed_comment_attrs": true, - "representation_add_range": true, - "representation_tags": [], - "load_to_batch_group": true, - "batch_group_loader_name": "LoadClipBatch", - "filter_path_regex": ".*" - } - } - }, - "IntegrateBatchGroup": { - "enabled": false - } - }, - "load": { - "LoadClip": { - "enabled": true, - "families": [ - "render2d", - "source", - "plate", - "render", - "review" - ], - "reel_group_name": "OpenPype_Reels", - "reel_name": "Loaded", - "clip_name_template": "{asset}_{subset}<_{output}>", - "layer_rename_template": "{asset}_{subset}<_{output}>", - "layer_rename_patterns": [ - "rgb", - "rgba" - ] - }, - "LoadClipBatch": { - "enabled": true, - "families": [ - "render2d", - "source", - "plate", - "render", - "review" - ], - "reel_name": "OP_LoadedReel", - "clip_name_template": "{batch}_{asset}_{subset}<_{output}>", - "layer_rename_template": "{asset}_{subset}<_{output}>", - "layer_rename_patterns": [ - "rgb", - "rgba" - ] - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/ftrack.json b/client/ayon_core/settings/defaults/project_settings/ftrack.json deleted file mode 100644 index e2ca334b5f..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/ftrack.json +++ /dev/null @@ -1,522 +0,0 @@ -{ - "events": { - "sync_to_avalon": { - "role_list": [ - "Pypeclub", - "Administrator", - "Project manager" - ] - }, - "prepare_project": { - "enabled": true, - "role_list": [ - "Pypeclub", - "Administrator", - "Project manager" - ] - }, - "sync_hier_entity_attributes": { - "enabled": true, - "interest_entity_types": [ - "Shot", - "Asset Build" - ], - "interest_attributes": [ - "frameStart", - "frameEnd" - ], - "action_enabled": true, - "role_list": [ - "Pypeclub", - "Administrator", - "Project Manager" - ] - }, - "clone_review_session": { - "enabled": true, - "role_list": [ - "Pypeclub", - "Administrator", - "Project Manager" - ] - }, - "thumbnail_updates": { - "enabled": true, - "levels": 1 - }, - "user_assignment": { - "enabled": true - }, - "status_update": { - "enabled": true, - "mapping": { - "In Progress": [ - "__any__" - ], - "Ready": [ - "Not Ready" - ], - "__ignore__": [ - "in progress", - "omitted", - "on hold" - ] - } - }, - "status_task_to_parent": { - "enabled": true, - "parent_object_types": [ - "Shot", - "Asset Build" - ], - "parent_status_match_all_task_statuses": { - "Completed": [ - "Approved", - "Omitted" - ] - }, - "parent_status_by_task_status": [ - { - "new_status": "In Progress", - "task_statuses": [ - "in progress", - "change requested", - "retake", - "pending review" - ] - } - ] - }, - "status_task_to_version": { - "enabled": true, - "mapping": {}, - "asset_types_filter": [] - }, - "status_version_to_task": { - "enabled": true, - "mapping": {}, - 
"asset_types_to_skip": [] - }, - "next_task_update": { - "enabled": true, - "mapping": { - "Not Ready": "Ready" - }, - "ignored_statuses": [ - "Omitted" - ], - "name_sorting": false - }, - "transfer_values_of_hierarchical_attributes": { - "enabled": true, - "role_list": [ - "Administrator", - "Project manager" - ] - }, - "create_daily_review_session": { - "enabled": true, - "role_list": [ - "Administrator", - "Project Manager" - ], - "cycle_enabled": false, - "cycle_hour_start": [ - 0, - 0, - 0 - ], - "review_session_template": "{yy}{mm}{dd}" - } - }, - "user_handlers": { - "application_launch_statuses": { - "enabled": true, - "ignored_statuses": [ - "In Progress", - "Omitted", - "On hold", - "Approved" - ], - "status_change": { - "In Progress": [] - } - }, - "create_update_attributes": { - "role_list": [ - "Pypeclub", - "Administrator" - ] - }, - "prepare_project": { - "enabled": true, - "role_list": [ - "Pypeclub", - "Administrator", - "Project manager" - ], - "create_project_structure_checked": false - }, - "clean_hierarchical_attr": { - "enabled": true, - "role_list": [ - "Pypeclub", - "Administrator", - "Project manager" - ] - }, - "delete_asset_subset": { - "enabled": true, - "role_list": [ - "Pypeclub", - "Administrator", - "Project Manager" - ] - }, - "delete_old_versions": { - "enabled": true, - "role_list": [ - "Pypeclub", - "Project Manager", - "Administrator" - ] - }, - "delivery_action": { - "enabled": true, - "role_list": [ - "Pypeclub", - "Project Manager", - "Administrator" - ] - }, - "store_thubmnail_to_avalon": { - "enabled": true, - "role_list": [ - "Pypeclub", - "Project Manager", - "Administrator" - ] - }, - "job_killer": { - "enabled": true, - "role_list": [ - "Pypeclub", - "Administrator" - ] - }, - "sync_to_avalon_local": { - "enabled": true, - "role_list": [ - "Pypeclub", - "Administrator" - ] - }, - "fill_workfile_attribute": { - "enabled": false, - "custom_attribute_key": "", - "role_list": [] - }, - "seed_project": { - "enabled": true, - "role_list": [ - "Pypeclub" - ] - } - }, - "publish": { - "CollectFtrackFamily": { - "enabled": true, - "profiles": [ - { - "hosts": [ - "standalonepublisher" - ], - "families": [], - "task_types": [], - "tasks": [], - "add_ftrack_family": true, - "advanced_filtering": [] - }, - { - "hosts": [ - "standalonepublisher" - ], - "families": [ - "matchmove", - "shot" - ], - "task_types": [], - "tasks": [], - "add_ftrack_family": false, - "advanced_filtering": [] - }, - { - "hosts": [ - "standalonepublisher" - ], - "families": [ - "plate" - ], - "task_types": [], - "tasks": [], - "add_ftrack_family": false, - "advanced_filtering": [ - { - "families": [ - "clip", - "review" - ], - "add_ftrack_family": true - } - ] - }, - { - "hosts": [ - "traypublisher" - ], - "families": [], - "task_types": [], - "tasks": [], - "add_ftrack_family": true, - "advanced_filtering": [] - }, - { - "hosts": [ - "traypublisher" - ], - "families": [ - "matchmove", - "shot" - ], - "task_types": [], - "tasks": [], - "add_ftrack_family": false, - "advanced_filtering": [] - }, - { - "hosts": [ - "traypublisher" - ], - "families": [ - "plate", - "review", - "audio" - ], - "task_types": [], - "tasks": [], - "add_ftrack_family": false, - "advanced_filtering": [ - { - "families": [ - "clip", - "review" - ], - "add_ftrack_family": true - } - ] - }, - { - "hosts": [ - "maya" - ], - "families": [ - "model", - "setdress", - "animation", - "look", - "rig", - "camera", - "renderlayer" - ], - "task_types": [], - "tasks": [], - "add_ftrack_family": true, - 
"advanced_filtering": [] - }, - { - "hosts": [ - "tvpaint" - ], - "families": [ - "renderPass" - ], - "task_types": [], - "tasks": [], - "add_ftrack_family": false, - "advanced_filtering": [] - }, - { - "hosts": [ - "tvpaint" - ], - "families": [], - "task_types": [], - "tasks": [], - "add_ftrack_family": true, - "advanced_filtering": [] - }, - { - "hosts": [ - "nuke" - ], - "families": [ - "write", - "render", - "prerender" - ], - "task_types": [], - "tasks": [], - "add_ftrack_family": false, - "advanced_filtering": [ - { - "families": [ - "review" - ], - "add_ftrack_family": true - } - ] - }, - { - "hosts": [ - "aftereffects" - ], - "families": [ - "render", - "workfile" - ], - "task_types": [], - "tasks": [], - "add_ftrack_family": true, - "advanced_filtering": [] - }, - { - "hosts": [ - "flame" - ], - "families": [ - "plate", - "take" - ], - "task_types": [], - "tasks": [], - "add_ftrack_family": true, - "advanced_filtering": [] - }, - { - "hosts": [ - "houdini" - ], - "families": [ - "usd" - ], - "task_types": [], - "tasks": [], - "add_ftrack_family": true, - "advanced_filtering": [] - }, - { - "hosts": [ - "photoshop" - ], - "families": [ - "review" - ], - "task_types": [], - "tasks": [], - "add_ftrack_family": true, - "advanced_filtering": [] - } - ] - }, - "CollectFtrackCustomAttributeData": { - "enabled": false, - "custom_attribute_keys": [] - }, - "IntegrateHierarchyToFtrack": { - "create_task_status_profiles": [] - }, - "IntegrateFtrackNote": { - "enabled": true, - "note_template": "{intent}: {comment}", - "note_labels": [] - }, - "IntegrateFtrackDescription": { - "enabled": false, - "optional": true, - "active": true, - "description_template": "{comment}" - }, - "ValidateFtrackAttributes": { - "enabled": false, - "ftrack_custom_attributes": {} - }, - "IntegrateFtrackComponentOverwrite": { - "enabled": true - }, - "IntegrateFtrackInstance": { - "family_mapping": { - "camera": "cam", - "look": "look", - "mayaAscii": "scene", - "model": "geo", - "rig": "rig", - "setdress": "setdress", - "pointcache": "cache", - "render": "render", - "prerender": "render", - "render2d": "render", - "nukescript": "comp", - "write": "render", - "review": "mov", - "plate": "img", - "audio": "audio", - "workfile": "scene", - "animation": "cache", - "image": "img", - "reference": "reference", - "ass": "cache", - "mayaScene": "scene", - "camerarig": "rig", - "yeticache": "cache", - "yetiRig": "rig", - "xgen": "xgen", - "rendersetup": "rendersetup", - "assembly": "assembly", - "layout": "layout", - "unrealStaticMesh": "geo", - "vrayproxy": "cache", - "redshiftproxy": "cache", - "usd": "usd" - }, - "keep_first_subset_name_for_review": true, - "asset_versions_status_profiles": [], - "additional_metadata_keys": [], - "upload_reviewable_with_origin_name": false - }, - "IntegrateFtrackFarmStatus": { - "farm_status_profiles": [ - { - "hosts": [ - "celaction" - ], - "task_types": [], - "task_names": [], - "families": [ - "render" - ], - "subsets": [], - "status_name": "Render" - } - ] - }, - "ftrack_task_status_local_publish": { - "status_profiles": [] - }, - "ftrack_task_status_on_farm_publish": { - "status_profiles": [] - }, - "IntegrateFtrackTaskStatus": { - "after_version_statuses": true - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/fusion.json b/client/ayon_core/settings/defaults/project_settings/fusion.json deleted file mode 100644 index f890f94b6f..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/fusion.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "imageio": { 
- "activate_host_color_management": true, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - } - }, - "copy_fusion_settings": { - "copy_path": "~/.openpype/hosts/fusion/profiles", - "copy_status": false, - "force_sync": false - }, - "hooks": { - "InstallPySideToFusion": { - "enabled": true - } - }, - "create": { - "CreateSaver": { - "temp_rendering_path_template": "{workdir}/renders/fusion/{subset}/{subset}.{frame}.{ext}", - "default_variants": [ - "Main", - "Mask" - ], - "instance_attributes": [ - "reviewable", - "farm_rendering" - ], - "image_format": "exr", - "default_frame_range_option": "asset_db" - }, - "CreateImageSaver": { - "temp_rendering_path_template": "{workdir}/renders/fusion/{subset}/{subset}.{ext}", - "default_variants": [ - "Main", - "Mask" - ], - "instance_attributes": [ - "reviewable", - "farm_rendering" - ], - "image_format": "exr", - "default_frame": 0 - } - }, - "publish": { - "ValidateSaverResolution": { - "enabled": true, - "optional": true, - "active": true - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/global.json b/client/ayon_core/settings/defaults/project_settings/global.json deleted file mode 100644 index bb7e3266bd..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/global.json +++ /dev/null @@ -1,579 +0,0 @@ -{ - "version_start_category": { - "profiles": [] - }, - "imageio": { - "activate_global_color_management": false, - "ocio_config": { - "filepath": [ - "{BUILTIN_OCIO_ROOT}/aces_1.2/config.ocio", - "{BUILTIN_OCIO_ROOT}/nuke-default/config.ocio" - ] - }, - "file_rules": { - "activate_global_file_rules": false, - "rules": { - "example": { - "pattern": ".*(beauty).*", - "colorspace": "ACES - ACEScg", - "ext": "exr" - } - } - } - }, - "publish": { - "CollectAnatomyInstanceData": { - "follow_workfile_version": false - }, - "CollectAudio": { - "enabled": false, - "audio_subset_name": "audioMain" - }, - "CollectSceneVersion": { - "hosts": [ - "aftereffects", - "blender", - "celaction", - "fusion", - "harmony", - "hiero", - "houdini", - "maya", - "nuke", - "photoshop", - "resolve", - "tvpaint" - ], - "skip_hosts_headless_publish": [] - }, - "collect_comment_per_instance": { - "enabled": false, - "families": [] - }, - "CollectFramesFixDef": { - "enabled": true, - "rewrite_version_enable": true - }, - "ValidateEditorialAssetName": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateVersion": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateIntent": { - "enabled": false, - "profiles": [] - }, - "ExtractThumbnail": { - "enabled": true, - "integrate_thumbnail": false, - "background_color": [ - 0, - 0, - 0, - 255 - ], - "duration_split": 0.5, - "target_size": { - "type": "resize", - "width": 1920, - "height": 1080 - }, - "oiiotool_defaults": { - "type": "colorspace", - "colorspace": "color_picking", - "view": "sRGB", - "display": "default" - }, - "ffmpeg_args": { - "input": [ - "-apply_trc gamma22" - ], - "output": [] - } - }, - "ExtractOIIOTranscode": { - "enabled": true, - "profiles": [] - }, - "ExtractReview": { - "enabled": true, - "profiles": [ - { - "families": [], - "hosts": [], - "outputs": { - "png": { - "ext": "png", - "tags": [ - "ftrackreview", - "kitsureview" - ], - "burnins": [], - "ffmpeg_args": { - "video_filters": [], - "audio_filters": [], - "input": [], - "output": [] - }, - "filter": { - "families": [ - "render", - "review", - "ftrack" - ], - "subsets": [], - 
"custom_tags": [], - "single_frame_filter": "single_frame" - }, - "overscan_crop": "", - "overscan_color": [ - 0, - 0, - 0, - 255 - ], - "width": 1920, - "height": 1080, - "scale_pixel_aspect": true, - "bg_color": [ - 0, - 0, - 0, - 0 - ], - "letter_box": { - "enabled": false, - "ratio": 0.0, - "fill_color": [ - 0, - 0, - 0, - 255 - ], - "line_thickness": 0, - "line_color": [ - 255, - 0, - 0, - 255 - ] - } - }, - "h264": { - "ext": "mp4", - "tags": [ - "burnin", - "ftrackreview", - "kitsureview" - ], - "burnins": [], - "ffmpeg_args": { - "video_filters": [], - "audio_filters": [], - "input": [ - "-apply_trc gamma22" - ], - "output": [ - "-pix_fmt yuv420p", - "-crf 18", - "-intra" - ] - }, - "filter": { - "families": [ - "render", - "review", - "ftrack" - ], - "subsets": [], - "custom_tags": [], - "single_frame_filter": "multi_frame" - }, - "overscan_crop": "", - "overscan_color": [ - 0, - 0, - 0, - 255 - ], - "width": 0, - "height": 0, - "scale_pixel_aspect": true, - "bg_color": [ - 0, - 0, - 0, - 0 - ], - "letter_box": { - "enabled": false, - "ratio": 0.0, - "fill_color": [ - 0, - 0, - 0, - 255 - ], - "line_thickness": 0, - "line_color": [ - 255, - 0, - 0, - 255 - ] - } - } - } - } - ] - }, - "ExtractBurnin": { - "enabled": true, - "options": { - "font_size": 42, - "font_color": [ - 255, - 255, - 255, - 255 - ], - "bg_color": [ - 0, - 0, - 0, - 127 - ], - "x_offset": 5, - "y_offset": 5, - "bg_padding": 5, - "font_filepath": { - "windows": "", - "darwin": "", - "linux": "" - } - }, - "profiles": [ - { - "families": [], - "hosts": [], - "task_types": [], - "task_names": [], - "subsets": [], - "burnins": { - "burnin": { - "TOP_LEFT": "{yy}-{mm}-{dd}", - "TOP_CENTERED": "", - "TOP_RIGHT": "{anatomy[version]}", - "BOTTOM_LEFT": "{username}", - "BOTTOM_CENTERED": "{asset}", - "BOTTOM_RIGHT": "{frame_start}-{current_frame}-{frame_end}", - "filter": { - "families": [], - "tags": [] - } - } - } - }, - { - "families": [ - "review" - ], - "hosts": [ - "maya", - "houdini", - "max" - ], - "task_types": [], - "task_names": [], - "subsets": [], - "burnins": { - "focal_length_burnin": { - "TOP_LEFT": "{yy}-{mm}-{dd}", - "TOP_CENTERED": "{focalLength:.2f} mm", - "TOP_RIGHT": "{anatomy[version]}", - "BOTTOM_LEFT": "{username}", - "BOTTOM_CENTERED": "{asset}", - "BOTTOM_RIGHT": "{frame_start}-{current_frame}-{frame_end}", - "filter": { - "families": [], - "tags": [] - } - } - } - } - ] - }, - "PreIntegrateThumbnails": { - "enabled": true, - "integrate_profiles": [] - }, - "IntegrateSubsetGroup": { - "subset_grouping_profiles": [ - { - "families": [], - "hosts": [], - "task_types": [], - "tasks": [], - "template": "" - } - ] - }, - "IntegrateHeroVersion": { - "enabled": true, - "optional": true, - "active": true, - "families": [ - "model", - "rig", - "look", - "pointcache", - "animation", - "setdress", - "layout", - "mayaScene" - ], - "template_name_profiles": [] - }, - "CleanUp": { - "paterns": [], - "remove_temp_renders": false - }, - "CleanUpFarm": { - "enabled": false - } - }, - "tools": { - "creator": { - "families_smart_select": { - "Render": [ - "light", - "render" - ], - "Model": [ - "model" - ], - "Layout": [ - "layout" - ], - "Look": [ - "look" - ], - "Rig": [ - "rigging", - "rig" - ] - }, - "subset_name_profiles": [ - { - "families": [], - "hosts": [], - "task_types": [], - "tasks": [], - "template": "{family}{variant}" - }, - { - "families": [ - "workfile" - ], - "hosts": [], - "task_types": [], - "tasks": [], - "template": "{family}{Task}" - }, - { - "families": [ - "render" - ], - "hosts": [], 
- "task_types": [], - "tasks": [], - "template": "{family}{Task}{Variant}" - }, - { - "families": [ - "renderLayer", - "renderPass" - ], - "hosts": [ - "tvpaint" - ], - "task_types": [], - "tasks": [], - "template": "{family}{Task}_{Renderlayer}_{Renderpass}" - }, - { - "families": [ - "review", - "workfile" - ], - "hosts": [ - "aftereffects", - "tvpaint" - ], - "task_types": [], - "tasks": [], - "template": "{family}{Task}" - }, - { - "families": [ - "render" - ], - "hosts": [ - "aftereffects" - ], - "task_types": [], - "tasks": [], - "template": "{family}{Task}{Composition}{Variant}" - }, - { - "families": [ - "staticMesh" - ], - "hosts": [ - "maya" - ], - "task_types": [], - "tasks": [], - "template": "S_{asset}{variant}" - }, - { - "families": [ - "skeletalMesh" - ], - "hosts": [ - "maya" - ], - "task_types": [], - "tasks": [], - "template": "SK_{asset}{variant}" - } - ] - }, - "Workfiles": { - "workfile_template_profiles": [ - { - "task_types": [], - "hosts": [], - "workfile_template": "work" - }, - { - "task_types": [], - "hosts": [ - "unreal" - ], - "workfile_template": "unreal" - } - ], - "last_workfile_on_startup": [ - { - "hosts": [], - "task_types": [], - "tasks": [], - "enabled": true, - "use_last_published_workfile": false - } - ], - "open_workfile_tool_on_startup": [ - { - "hosts": [], - "task_types": [], - "tasks": [], - "enabled": false - } - ], - "extra_folders": [], - "workfile_lock_profiles": [] - }, - "loader": { - "family_filter_profiles": [ - { - "hosts": [], - "task_types": [], - "is_include": true, - "filter_families": [] - } - ] - }, - "publish": { - "template_name_profiles": [ - { - "families": [], - "hosts": [], - "task_types": [], - "task_names": [], - "template_name": "publish" - }, - { - "families": [ - "review", - "render", - "prerender" - ], - "hosts": [], - "task_types": [], - "task_names": [], - "template_name": "render" - }, - { - "families": [ - "staticMesh", - "skeletalMesh" - ], - "hosts": [ - "maya" - ], - "task_types": [], - "task_names": [], - "template_name": "maya2unreal" - }, - { - "families": [ - "online" - ], - "hosts": [ - "traypublisher" - ], - "task_types": [], - "task_names": [], - "template_name": "online" - }, - { - "families": [ - "tycache" - ], - "hosts": [ - "max" - ], - "task_types": [], - "task_names": [], - "template_name": "tycache" - } - ], - "hero_template_name_profiles": [], - "custom_staging_dir_profiles": [] - } - }, - "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets\": {\"characters\": {}, \"locations\": {}}, \"shots\": {}}}", - "sync_server": { - "enabled": false, - "config": { - "retry_cnt": "3", - "loop_delay": "60", - "always_accessible_on": [], - "active_site": "studio", - "remote_site": "studio" - }, - "sites": {} - }, - "project_plugins": { - "windows": [], - "darwin": [], - "linux": [] - }, - "project_environments": {} -} diff --git a/client/ayon_core/settings/defaults/project_settings/harmony.json b/client/ayon_core/settings/defaults/project_settings/harmony.json deleted file mode 100644 index b424b43cc1..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/harmony.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "imageio": { - "activate_host_color_management": true, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - } - }, - "publish": { - "CollectPalettes": { - 
"allowed_tasks": [ - ".*" - ] - }, - "ValidateAudio": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateContainers": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateSceneSettings": { - "enabled": true, - "optional": true, - "active": true, - "frame_check_filter": [], - "skip_resolution_check": [], - "skip_timelines_check": [] - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/hiero.json b/client/ayon_core/settings/defaults/project_settings/hiero.json deleted file mode 100644 index 9c83733b09..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/hiero.json +++ /dev/null @@ -1,87 +0,0 @@ -{ - "imageio": { - "activate_host_color_management": true, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - }, - "workfile": { - "ocioConfigName": "nuke-default", - "workingSpace": "linear", - "sixteenBitLut": "sRGB", - "eightBitLut": "sRGB", - "floatLut": "linear", - "logLut": "Cineon", - "viewerLut": "sRGB", - "thumbnailLut": "sRGB", - "monitorOutLut": "sRGB" - }, - "regexInputs": { - "inputs": [ - { - "regex": "[^-a-zA-Z0-9](plateRef).*(?=mp4)", - "colorspace": "sRGB" - } - ] - } - }, - "create": { - "CreateShotClip": { - "hierarchy": "{folder}/{sequence}", - "clipRename": true, - "clipName": "{track}{sequence}{shot}", - "countFrom": 10, - "countSteps": 10, - "folder": "shots", - "episode": "ep01", - "sequence": "sq01", - "track": "{_track_}", - "shot": "sh###", - "vSyncOn": false, - "workfileFrameStart": 1001, - "handleStart": 10, - "handleEnd": 10 - } - }, - "load": { - "LoadClip": { - "enabled": true, - "families": [ - "render2d", - "source", - "plate", - "render", - "review" - ], - "clip_name_template": "{asset}_{subset}_{representation}" - } - }, - "publish": { - "CollectInstanceVersion": { - "enabled": false - }, - "ExtractReviewCutUpVideo": { - "enabled": true, - "tags_addition": [ - "review" - ] - } - }, - "filters": {}, - "scriptsmenu": { - "name": "OpenPype Tools", - "definition": [ - { - "type": "action", - "sourcetype": "python", - "title": "OpenPype Docs", - "command": "import webbrowser;webbrowser.open(url='https://openpype.io/docs/artist_hosts_hiero')", - "tooltip": "Open the OpenPype Hiero user doc page" - } - ] - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/houdini.json b/client/ayon_core/settings/defaults/project_settings/houdini.json deleted file mode 100644 index 813e7153ea..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/houdini.json +++ /dev/null @@ -1,192 +0,0 @@ -{ - "general": { - "add_self_publish_button": false, - "update_houdini_var_context": { - "enabled": true, - "houdini_vars":[ - { - "var": "JOB", - "value": "{root[work]}/{project[name]}/{hierarchy}/{asset}/work/{task[name]}", - "is_directory": true - } - ] - } - }, - "imageio": { - "activate_host_color_management": true, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - } - }, - "shelves": [], - "create": { - "CreateAlembicCamera": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateArnoldAss": { - "enabled": true, - "default_variants": [ - "Main" - ], - "ext": ".ass" - }, - "CreateArnoldRop": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateCompositeSequence": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateHDA": { - "enabled": true, - 
"default_variants": [ - "Main" - ] - }, - "CreateKarmaROP": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateMantraIFD": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateMantraROP": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreatePointCache": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateBGEO": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateRedshiftProxy": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateRedshiftROP": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateReview": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateStaticMesh": { - "enabled": true, - "default_variants": [ - "Main" - ], - "static_mesh_prefix": "S", - "collision_prefixes": [ - "UBX", - "UCP", - "USP", - "UCX" - ] - }, - "CreateUSD": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateUSDRender": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateVDBCache": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateVrayROP": { - "enabled": true, - "default_variants": [ - "Main" - ] - } - }, - "publish": { - "CollectAssetHandles": { - "use_asset_handles": true - }, - "CollectChunkSize": { - "enabled": true, - "optional": true, - "chunk_size": 999999 - }, - "ValidateContainers": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateMeshIsStatic": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateReviewColorspace": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateSubsetName": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateUnrealStaticMeshName": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateWorkfilePaths": { - "enabled": true, - "optional": true, - "node_types": [ - "file", - "alembic" - ], - "prohibited_vars": [ - "$HIP", - "$JOB" - ] - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/kitsu.json b/client/ayon_core/settings/defaults/project_settings/kitsu.json deleted file mode 100644 index 59a36d8b97..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/kitsu.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "entities_naming_pattern": { - "episode": "E##", - "sequence": "SQ##", - "shot": "SH##" - }, - "publish": { - "IntegrateKitsuNote": { - "set_status_note": false, - "note_status_shortname": "wfa", - "status_change_conditions": { - "status_conditions": [], - "family_requirements": [] - }, - "custom_comment_template": { - "enabled": false, - "comment_template": "{comment}\n\n| | |\n|--|--|\n| version| `{version}` |\n| family | `{family}` |\n| name | `{name}` |" - } - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/max.json b/client/ayon_core/settings/defaults/project_settings/max.json deleted file mode 100644 index d1610610dc..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/max.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "unit_scale_settings": { - "enabled": true, - "scene_unit_scale": "Meters" - }, - "imageio": { - "activate_host_color_management": true, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - } - }, - "RenderSettings": { - "default_render_image_folder": "renders/3dsmax", - "aov_separator": "underscore", - "image_format": "exr", - "multipass": true - }, - "CreateReview": { - 
"review_width": 1920, - "review_height": 1080, - "percentSize": 100.0, - "keep_images": false, - "image_format": "png", - "visual_style": "Realistic", - "viewport_preset": "Quality", - "anti_aliasing": "None", - "vp_texture": true - }, - "PointCloud": { - "attribute": { - "Age": "age", - "Radius": "radius", - "Position": "position", - "Rotation": "rotation", - "Scale": "scale", - "Velocity": "velocity", - "Color": "color", - "TextureCoordinate": "texcoord", - "MaterialID": "matid", - "custFloats": "custFloats", - "custVecs": "custVecs" - } - }, - "publish": { - "ValidateFrameRange": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateAttributes": { - "enabled": false, - "attributes": {} - }, - "ValidateLoadedPlugin": { - "enabled": false, - "optional": true, - "family_plugins_mapping": [] - }, - "ExtractModelObj": { - "enabled": true, - "optional": true, - "active": false - }, - "ExtractModelFbx": { - "enabled": true, - "optional": true, - "active": false - }, - "ExtractModelUSD": { - "enabled": true, - "optional": true, - "active": false - }, - "ExtractModel": { - "enabled": true, - "optional": true, - "active": true - }, - "ExtractMaxSceneRaw": { - "enabled": true, - "optional": true, - "active": true - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/maya.json b/client/ayon_core/settings/defaults/project_settings/maya.json deleted file mode 100644 index b2dc0ccd65..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/maya.json +++ /dev/null @@ -1,1613 +0,0 @@ -{ - "open_workfile_post_initialization": false, - "explicit_plugins_loading": { - "enabled": false, - "plugins_to_load": [ - { - "enabled": false, - "name": "AbcBullet" - }, - { - "enabled": true, - "name": "AbcExport" - }, - { - "enabled": true, - "name": "AbcImport" - }, - { - "enabled": false, - "name": "animImportExport" - }, - { - "enabled": false, - "name": "ArubaTessellator" - }, - { - "enabled": false, - "name": "ATFPlugin" - }, - { - "enabled": false, - "name": "atomImportExport" - }, - { - "enabled": false, - "name": "AutodeskPacketFile" - }, - { - "enabled": false, - "name": "autoLoader" - }, - { - "enabled": false, - "name": "bifmeshio" - }, - { - "enabled": false, - "name": "bifrostGraph" - }, - { - "enabled": false, - "name": "bifrostshellnode" - }, - { - "enabled": false, - "name": "bifrostvisplugin" - }, - { - "enabled": false, - "name": "blast2Cmd" - }, - { - "enabled": false, - "name": "bluePencil" - }, - { - "enabled": false, - "name": "Boss" - }, - { - "enabled": false, - "name": "bullet" - }, - { - "enabled": true, - "name": "cacheEvaluator" - }, - { - "enabled": false, - "name": "cgfxShader" - }, - { - "enabled": false, - "name": "cleanPerFaceAssignment" - }, - { - "enabled": false, - "name": "clearcoat" - }, - { - "enabled": false, - "name": "convertToComponentTags" - }, - { - "enabled": false, - "name": "curveWarp" - }, - { - "enabled": false, - "name": "ddsFloatReader" - }, - { - "enabled": true, - "name": "deformerEvaluator" - }, - { - "enabled": false, - "name": "dgProfiler" - }, - { - "enabled": false, - "name": "drawUfe" - }, - { - "enabled": false, - "name": "dx11Shader" - }, - { - "enabled": false, - "name": "fbxmaya" - }, - { - "enabled": false, - "name": "fltTranslator" - }, - { - "enabled": false, - "name": "freeze" - }, - { - "enabled": false, - "name": "Fur" - }, - { - "enabled": false, - "name": "gameFbxExporter" - }, - { - "enabled": false, - "name": "gameInputDevice" - }, - { - "enabled": false, - "name": "GamePipeline" - }, - { - 
"enabled": false, - "name": "gameVertexCount" - }, - { - "enabled": false, - "name": "geometryReport" - }, - { - "enabled": false, - "name": "geometryTools" - }, - { - "enabled": false, - "name": "glslShader" - }, - { - "enabled": true, - "name": "GPUBuiltInDeformer" - }, - { - "enabled": false, - "name": "gpuCache" - }, - { - "enabled": false, - "name": "hairPhysicalShader" - }, - { - "enabled": false, - "name": "ik2Bsolver" - }, - { - "enabled": false, - "name": "ikSpringSolver" - }, - { - "enabled": false, - "name": "invertShape" - }, - { - "enabled": false, - "name": "lges" - }, - { - "enabled": false, - "name": "lookdevKit" - }, - { - "enabled": false, - "name": "MASH" - }, - { - "enabled": false, - "name": "matrixNodes" - }, - { - "enabled": false, - "name": "mayaCharacterization" - }, - { - "enabled": false, - "name": "mayaHIK" - }, - { - "enabled": false, - "name": "MayaMuscle" - }, - { - "enabled": false, - "name": "mayaUsdPlugin" - }, - { - "enabled": false, - "name": "mayaVnnPlugin" - }, - { - "enabled": false, - "name": "melProfiler" - }, - { - "enabled": false, - "name": "meshReorder" - }, - { - "enabled": true, - "name": "modelingToolkit" - }, - { - "enabled": false, - "name": "mtoa" - }, - { - "enabled": false, - "name": "mtoh" - }, - { - "enabled": false, - "name": "nearestPointOnMesh" - }, - { - "enabled": true, - "name": "objExport" - }, - { - "enabled": false, - "name": "OneClick" - }, - { - "enabled": false, - "name": "OpenEXRLoader" - }, - { - "enabled": false, - "name": "pgYetiMaya" - }, - { - "enabled": false, - "name": "pgyetiVrayMaya" - }, - { - "enabled": false, - "name": "polyBoolean" - }, - { - "enabled": false, - "name": "poseInterpolator" - }, - { - "enabled": false, - "name": "quatNodes" - }, - { - "enabled": false, - "name": "randomizerDevice" - }, - { - "enabled": false, - "name": "redshift4maya" - }, - { - "enabled": true, - "name": "renderSetup" - }, - { - "enabled": false, - "name": "retargeterNodes" - }, - { - "enabled": false, - "name": "RokokoMotionLibrary" - }, - { - "enabled": false, - "name": "rotateHelper" - }, - { - "enabled": false, - "name": "sceneAssembly" - }, - { - "enabled": false, - "name": "shaderFXPlugin" - }, - { - "enabled": false, - "name": "shotCamera" - }, - { - "enabled": false, - "name": "snapTransform" - }, - { - "enabled": false, - "name": "stage" - }, - { - "enabled": true, - "name": "stereoCamera" - }, - { - "enabled": false, - "name": "stlTranslator" - }, - { - "enabled": false, - "name": "studioImport" - }, - { - "enabled": false, - "name": "Substance" - }, - { - "enabled": false, - "name": "substancelink" - }, - { - "enabled": false, - "name": "substancemaya" - }, - { - "enabled": false, - "name": "substanceworkflow" - }, - { - "enabled": false, - "name": "svgFileTranslator" - }, - { - "enabled": false, - "name": "sweep" - }, - { - "enabled": false, - "name": "testify" - }, - { - "enabled": false, - "name": "tiffFloatReader" - }, - { - "enabled": false, - "name": "timeSliderBookmark" - }, - { - "enabled": false, - "name": "Turtle" - }, - { - "enabled": false, - "name": "Type" - }, - { - "enabled": false, - "name": "udpDevice" - }, - { - "enabled": false, - "name": "ufeSupport" - }, - { - "enabled": false, - "name": "Unfold3D" - }, - { - "enabled": false, - "name": "VectorRender" - }, - { - "enabled": false, - "name": "vrayformaya" - }, - { - "enabled": false, - "name": "vrayvolumegrid" - }, - { - "enabled": false, - "name": "xgenToolkit" - }, - { - "enabled": false, - "name": "xgenVray" - } - ] - }, - "imageio": { - 
"activate_host_color_management": true, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - }, - "workfile": { - "enabled": false, - "renderSpace": "ACES - ACEScg", - "displayName": "ACES", - "viewName": "sRGB" - }, - "colorManagementPreference_v2": { - "enabled": true, - "renderSpace": "ACEScg", - "displayName": "sRGB", - "viewName": "ACES 1.0 SDR-video" - }, - "colorManagementPreference": { - "renderSpace": "scene-linear Rec 709/sRGB", - "viewTransform": "sRGB gamma" - } - }, - "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders/maya\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\nworkspace -fr \"autoSave\" \"autosave\";", - "ext_mapping": { - "model": "ma", - "mayaAscii": "ma", - "camera": "ma", - "rig": "ma", - "workfile": "ma", - "yetiRig": "ma" - }, - "maya-dirmap": { - "use_env_var_as_root": false, - "enabled": false, - "paths": { - "source-path": [], - "destination-path": [] - } - }, - "include_handles": { - "include_handles_default": false, - "per_task_type": [] - }, - "scriptsmenu": { - "name": "OpenPype Tools", - "definition": [ - { - "type": "action", - "command": "import ayon_core.hosts.maya.api.commands as op_cmds; op_cmds.edit_shader_definitions()", - "sourcetype": "python", - "title": "Edit shader name definitions", - "tooltip": "Edit shader name definitions used in validation and renaming.", - "tags": [ - "pipeline", - "shader" - ] - } - ] - }, - "RenderSettings": { - "apply_render_settings": true, - "default_render_image_folder": "renders/maya", - "enable_all_lights": true, - "aov_separator": "underscore", - "remove_aovs": false, - "reset_current_frame": false, - "arnold_renderer": { - "image_prefix": "//_", - "image_format": "exr", - "multilayer_exr": true, - "tiled": true, - "aov_list": [], - "additional_options": [] - }, - "vray_renderer": { - "image_prefix": "//", - "engine": "1", - "image_format": "exr", - "aov_list": [], - "additional_options": [] - }, - "redshift_renderer": { - "image_prefix": "//", - "primary_gi_engine": "0", - "secondary_gi_engine": "0", - "image_format": "exr", - "multilayer_exr": true, - "force_combine": true, - "aov_list": [], - "additional_options": [] - }, - "renderman_renderer": { - "image_prefix": "{aov_separator}..", - "image_dir": "/", - "display_filters": [], - "imageDisplay_dir": "/{aov_separator}imageDisplayFilter..", - "sample_filters": [], - "cryptomatte_dir": "/{aov_separator}cryptomatte..", - "watermark_dir": "/{aov_separator}watermarkFilter..", - "additional_options": [] - } - }, - "create": { - "CreateLook": { - "enabled": true, - "make_tx": true, - "rs_tex": false, - "default_variants": [ - "Main" - ] - }, - "CreateRender": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateUnrealStaticMesh": { - "enabled": true, - "default_variants": [ - "", - "_Main" - ], - "static_mesh_prefix": "S", - "collision_prefixes": [ - "UBX", - "UCP", - "USP", - "UCX" - ] - }, - "CreateUnrealSkeletalMesh": { - "enabled": true, - "default_variants": [ - "Main" - ], - "joint_hints": "jnt_org" - }, - "CreateMultiverseLook": { - "enabled": true, - "publish_mip_map": true - }, - "CreateAnimation": { - 
"write_color_sets": false, - "write_face_sets": false, - "include_parent_hierarchy": false, - "include_user_defined_attributes": false, - "default_variants": [ - "Main" - ] - }, - "CreateModel": { - "enabled": true, - "write_color_sets": false, - "write_face_sets": false, - "default_variants": [ - "Main", - "Proxy", - "Sculpt" - ] - }, - "CreatePointCache": { - "enabled": true, - "write_color_sets": false, - "write_face_sets": false, - "include_user_defined_attributes": false, - "default_variants": [ - "Main" - ] - }, - "CreateProxyAlembic": { - "enabled": true, - "write_color_sets": false, - "write_face_sets": false, - "default_variants": [ - "Main" - ] - }, - "CreateReview": { - "enabled": true, - "default_variants": [ - "Main" - ], - "useMayaTimeline": true - }, - "CreateAss": { - "enabled": true, - "default_variants": [ - "Main" - ], - "expandProcedurals": false, - "motionBlur": true, - "motionBlurKeys": 2, - "motionBlurLength": 0.5, - "maskOptions": false, - "maskCamera": false, - "maskLight": false, - "maskShape": false, - "maskShader": false, - "maskOverride": false, - "maskDriver": false, - "maskFilter": false, - "maskOperator": false, - "maskColor_manager": false - }, - "CreateVrayProxy": { - "enabled": true, - "vrmesh": true, - "alembic": true, - "default_variants": [ - "Main" - ] - }, - "CreateMultiverseUsd": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateMultiverseUsdComp": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateMultiverseUsdOver": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateAssembly": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateCamera": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateLayout": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateMayaScene": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateRenderSetup": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateRig": { - "enabled": true, - "default_variants": [ - "Main", - "Sim", - "Cloth" - ] - }, - "CreateSetDress": { - "enabled": true, - "default_variants": [ - "Main", - "Anim" - ] - }, - "CreateVRayScene": { - "enabled": true, - "default_variants": [ - "Main" - ] - }, - "CreateYetiRig": { - "enabled": true, - "default_variants": [ - "Main" - ] - } - }, - "publish": { - "CollectMayaRender": { - "sync_workfile_version": false - }, - "CollectFbxAnimation": { - "enabled": true - }, - "CollectFbxCamera": { - "enabled": false - }, - "CollectGLTF": { - "enabled": false - }, - "ValidateInstanceInContext": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateContainers": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateFrameRange": { - "enabled": true, - "optional": true, - "active": true, - "exclude_families": [ - "model", - "rig", - "staticMesh" - ] - }, - "ValidateShaderName": { - "enabled": false, - "optional": true, - "active": true, - "regex": "(?P.*)_(.*)_SHD" - }, - "ValidateShadingEngine": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateMayaColorSpace": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateAttributes": { - "enabled": false, - "attributes": {} - }, - "ValidateLoadedPlugin": { - "enabled": false, - "optional": true, - "whitelist_native_plugins": false, - "authorized_plugins": [] - }, - "ValidateMayaUnits": { - "enabled": true, - "optional": false, - "validate_linear_units": true, - "linear_units": "cm", - 
"validate_angular_units": true, - "angular_units": "deg", - "validate_fps": true - }, - "ValidateUnrealStaticMeshName": { - "enabled": true, - "optional": true, - "validate_mesh": false, - "validate_collision": true - }, - "ValidateCycleError": { - "enabled": true, - "optional": false, - "families": [ - "rig" - ] - }, - "ValidatePluginPathAttributes": { - "enabled": true, - "optional": false, - "active": true, - "attribute": { - "AlembicNode": "abc_File", - "VRayProxy": "fileName", - "RenderManArchive": "filename", - "pgYetiMaya": "cacheFileName", - "aiStandIn": "dso", - "RedshiftSprite": "tex0", - "RedshiftBokeh": "dofBokehImage", - "RedshiftCameraMap": "tex0", - "RedshiftEnvironment": "tex2", - "RedshiftDomeLight": "tex1", - "RedshiftIESLight": "profile", - "RedshiftLightGobo": "tex0", - "RedshiftNormalMap": "tex0", - "RedshiftProxyMesh": "fileName", - "RedshiftVolumeShape": "fileName", - "VRayTexGLSL": "fileName", - "VRayMtlGLSL": "fileName", - "VRayVRmatMtl": "fileName", - "VRayPtex": "ptexFile", - "VRayLightIESShape": "iesFile", - "VRayMesh": "materialAssignmentsFile", - "VRayMtlOSL": "fileName", - "VRayTexOSL": "fileName", - "VRayTexOCIO": "ocioConfigFile", - "VRaySettingsNode": "pmap_autoSaveFile2", - "VRayScannedMtl": "file", - "VRayScene": "parameterOverrideFilePath", - "VRayMtlMDL": "filename", - "VRaySimbiont": "file", - "dlOpenVDBShape": "filename", - "pgYetiMayaShape": "liveABCFilename", - "gpuCache": "cacheFileName" - } - }, - "ValidateRenderSettings": { - "arnold_render_attributes": [], - "vray_render_attributes": [], - "redshift_render_attributes": [], - "renderman_render_attributes": [] - }, - "ValidateResolution": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateCurrentRenderLayerIsRenderable": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateGLSLMaterial": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateGLSLPlugin": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateRenderImageRule": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateRenderNoDefaultCameras": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateRenderSingleCamera": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateRenderLayerAOVs": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateStepSize": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateVRayDistributedRendering": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateVrayReferencedAOVs": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateVRayTranslatorEnabled": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateVrayProxy": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateVrayProxyMembers": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateYetiRenderScriptCallbacks": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateYetiRigCacheState": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateYetiRigInputShapesInInstance": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateYetiRigSettings": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateModelName": { - "enabled": false, - "database": true, - "material_file": { - "windows": "", - "darwin": "", - "linux": "" - }, - "regex": "(.*)_(\\d)*_(?P.*)_(GEO)", - "top_level_regex": 
".*_GRP" - }, - "ValidateModelContent": { - "enabled": true, - "optional": false, - "validate_top_group": true - }, - "ValidateTransformNamingSuffix": { - "enabled": true, - "optional": true, - "SUFFIX_NAMING_TABLE": { - "mesh": [ - "_GEO", - "_GES", - "_GEP", - "_OSD" - ], - "nurbsCurve": [ - "_CRV" - ], - "nurbsSurface": [ - "_NRB" - ], - "locator": [ - "_LOC" - ], - "group": [ - "_GRP" - ] - }, - "ALLOW_IF_NOT_IN_SUFFIX_TABLE": true - }, - "ValidateColorSets": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateMeshHasOverlappingUVs": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateMeshArnoldAttributes": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateMeshShaderConnections": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateMeshSingleUVSet": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateMeshHasUVs": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateMeshLaminaFaces": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateMeshNgons": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateMeshNonManifold": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateMeshNoNegativeScale": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateMeshNonZeroEdgeLength": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateMeshNormalsUnlocked": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateMeshUVSetMap1": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateMeshVerticesHaveEdges": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateNoAnimation": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateNoNamespace": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateNoNullTransforms": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateNoUnknownNodes": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateNodeNoGhosting": { - "enabled": false, - "optional": false, - "active": true - }, - "ValidateShapeDefaultNames": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateShapeRenderStats": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateShapeZero": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateTransformZero": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateUniqueNames": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateNoVRayMesh": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateUnrealMeshTriangulated": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateAlembicVisibleOnly": { - "enabled": true, - "optional": false, - "active": true - }, - "ExtractProxyAlembic": { - "enabled": true, - "families": [ - "proxyAbc" - ] - }, - "ExtractAlembic": { - "enabled": true, - "families": [ - "pointcache", - "model", - "vrayproxy.alembic" - ] - }, - "ExtractObj": { - "enabled": false, - "optional": true - }, - "ValidateRigContents": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateRigJointsHidden": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateRigControllers": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateAnimatedReferenceRig": { - "enabled": true, - 
"optional": false, - "active": true - }, - "ValidateAnimationContent": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateOutRelatedNodeIds": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateRigControllersArnoldAttributes": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateSkeletalMeshHierarchy": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateSkeletonRigContents": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateSkeletonRigControllers": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateSkinclusterDeformerSet": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateRigOutSetNodeIds": { - "enabled": true, - "optional": false, - "allow_history_only": false - }, - "ValidateSkeletonRigOutSetNodeIds": { - "enabled": false, - "optional": false, - "allow_history_only": false - }, - "ValidateSkeletonRigOutputIds": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateSkeletonTopGroupHierarchy": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateCameraAttributes": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateAssemblyName": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateAssemblyNamespaces": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateAssemblyModelTransforms": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateAssRelativePaths": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateInstancerContent": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateInstancerFrameRanges": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateNoDefaultCameras": { - "enabled": true, - "optional": false, - "active": true - }, - "ValidateUnrealUpAxis": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateCameraContents": { - "enabled": true, - "optional": false, - "validate_shapes": true - }, - "ExtractPlayblast": { - "capture_preset": { - "Codec": { - "compression": "png", - "format": "image", - "quality": 95 - }, - "Display Options": { - "override_display": true, - "background": [ - 125, - 125, - 125, - 255 - ], - "backgroundBottom": [ - 125, - 125, - 125, - 255 - ], - "backgroundTop": [ - 125, - 125, - 125, - 255 - ], - "displayGradient": true - }, - "Generic": { - "isolate_view": true, - "off_screen": true, - "pan_zoom": false - }, - "Renderer": { - "rendererName": "vp2Renderer" - }, - "Resolution": { - "width": 1920, - "height": 1080 - }, - "Viewport Options": { - "override_viewport_options": true, - "displayLights": "default", - "displayTextures": true, - "textureMaxResolution": 1024, - "renderDepthOfField": true, - "shadows": true, - "twoSidedLighting": true, - "lineAAEnable": true, - "multiSample": 8, - "loadTextures": false, - "useDefaultMaterial": false, - "wireframeOnShaded": false, - "xray": false, - "jointXray": false, - "backfaceCulling": false, - "ssaoEnable": false, - "ssaoAmount": 1, - "ssaoRadius": 16, - "ssaoFilterRadius": 16, - "ssaoSamples": 16, - "fogging": false, - "hwFogFalloff": "0", - "hwFogDensity": 0.0, - "hwFogStart": 0, - "hwFogEnd": 100, - "hwFogAlpha": 0, - "hwFogColorR": 1.0, - "hwFogColorG": 1.0, - "hwFogColorB": 1.0, - "motionBlurEnable": false, - "motionBlurSampleCount": 8, - "motionBlurShutterOpenFraction": 0.2, - "cameras": false, - "clipGhosts": false, - "deformers": false, 
- "dimensions": false, - "dynamicConstraints": false, - "dynamics": false, - "fluids": false, - "follicles": false, - "greasePencils": false, - "grid": false, - "hairSystems": true, - "handles": false, - "headsUpDisplay": false, - "ikHandles": false, - "imagePlane": true, - "joints": false, - "lights": false, - "locators": false, - "manipulators": false, - "motionTrails": false, - "nCloths": false, - "nParticles": false, - "nRigids": false, - "controlVertices": false, - "nurbsCurves": false, - "hulls": false, - "nurbsSurfaces": false, - "particleInstancers": false, - "pivots": false, - "planes": false, - "pluginShapes": false, - "polymeshes": true, - "strokes": false, - "subdivSurfaces": false, - "textures": false, - "pluginObjects": { - "gpuCacheDisplayFilter": false - } - }, - "Camera Options": { - "displayGateMask": false, - "displayResolution": false, - "displayFilmGate": false, - "displayFieldChart": false, - "displaySafeAction": false, - "displaySafeTitle": false, - "displayFilmPivot": false, - "displayFilmOrigin": false, - "overscan": 1.0 - } - }, - "profiles": [] - }, - "ExtractMayaSceneRaw": { - "enabled": true, - "add_for_families": [ - "layout" - ] - }, - "ExtractCameraAlembic": { - "enabled": true, - "optional": true, - "active": true, - "bake_attributes": [] - }, - "ExtractCameraMayaScene": { - "enabled": true, - "optional": true, - "active": true, - "keep_image_planes": false - }, - "ExtractGLB": { - "enabled": true, - "active": true, - "ogsfx_path": "/maya2glTF/PBR/shaders/glTF_PBR.ogsfx" - }, - "ExtractLook": { - "maketx_arguments": [] - }, - "ExtractGPUCache": { - "enabled": false, - "families": [ - "model", - "animation", - "pointcache" - ], - "step": 1.0, - "stepSave": 1, - "optimize": true, - "optimizationThreshold": 40000, - "optimizeAnimationsForMotionBlur": true, - "writeMaterials": true, - "useBaseTessellation": true - } - }, - "load": { - "colors": { - "model": [ - 209, - 132, - 30, - 255 - ], - "rig": [ - 59, - 226, - 235, - 255 - ], - "pointcache": [ - 94, - 209, - 30, - 255 - ], - "animation": [ - 94, - 209, - 30, - 255 - ], - "ass": [ - 249, - 135, - 53, - 255 - ], - "camera": [ - 136, - 114, - 244, - 255 - ], - "fbx": [ - 215, - 166, - 255, - 255 - ], - "mayaAscii": [ - 67, - 174, - 255, - 255 - ], - "mayaScene": [ - 67, - 174, - 255, - 255 - ], - "setdress": [ - 255, - 250, - 90, - 255 - ], - "layout": [ - 255, - 250, - 90, - 255 - ], - "vdbcache": [ - 249, - 54, - 0, - 255 - ], - "vrayproxy": [ - 255, - 150, - 12, - 255 - ], - "vrayscene_layer": [ - 255, - 150, - 12, - 255 - ], - "yeticache": [ - 99, - 206, - 220, - 255 - ], - "yetiRig": [ - 0, - 205, - 125, - 255 - ] - }, - "reference_loader": { - "namespace": "{asset_name}_{subset}_##_", - "group_name": "_GRP", - "display_handle": true - }, - "import_loader": { - "namespace": "{asset_name}_{subset}_##_", - "group_name": "_GRP" - } - }, - "workfile_build": { - "profiles": [ - { - "task_types": [], - "tasks": [ - "Lighting" - ], - "current_context": [ - { - "subset_name_filters": [ - ".+[Mm]ain" - ], - "families": [ - "model" - ], - "repre_names": [ - "abc", - "ma" - ], - "loaders": [ - "ReferenceLoader" - ] - }, - { - "subset_name_filters": [], - "families": [ - "animation", - "pointcache", - "proxyAbc" - ], - "repre_names": [ - "abc" - ], - "loaders": [ - "ReferenceLoader" - ] - }, - { - "subset_name_filters": [], - "families": [ - "rendersetup" - ], - "repre_names": [ - "json" - ], - "loaders": [ - "RenderSetupLoader" - ] - }, - { - "subset_name_filters": [], - "families": [ - "camera" - ], - 
"repre_names": [ - "abc" - ], - "loaders": [ - "ReferenceLoader" - ] - } - ], - "linked_assets": [ - { - "subset_name_filters": [], - "families": [ - "sedress" - ], - "repre_names": [ - "ma" - ], - "loaders": [ - "ReferenceLoader" - ] - }, - { - "subset_name_filters": [], - "families": [ - "ArnoldStandin" - ], - "repre_names": [ - "ass" - ], - "loaders": [ - "assLoader" - ] - } - ] - } - ] - }, - "templated_workfile_build": { - "profiles": [] - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/nuke.json b/client/ayon_core/settings/defaults/project_settings/nuke.json deleted file mode 100644 index 11b2988c67..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/nuke.json +++ /dev/null @@ -1,544 +0,0 @@ -{ - "general": { - "menu": { - "create": "ctrl+alt+c", - "publish": "ctrl+alt+p", - "load": "ctrl+alt+l", - "manage": "ctrl+alt+m", - "build_workfile": "ctrl+alt+b" - } - }, - "imageio": { - "activate_host_color_management": true, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - }, - "viewer": { - "viewerProcess": "sRGB (default)" - }, - "baking": { - "viewerProcess": "rec709 (default)" - }, - "workfile": { - "colorManagement": "OCIO", - "OCIO_config": "nuke-default", - "workingSpaceLUT": "scene_linear", - "monitorLut": "sRGB (default)" - }, - "nodes": { - "requiredNodes": [ - { - "plugins": [ - "CreateWriteRender" - ], - "nukeNodeClass": "Write", - "knobs": [ - { - "type": "text", - "name": "file_type", - "value": "exr" - }, - { - "type": "text", - "name": "datatype", - "value": "16 bit half" - }, - { - "type": "text", - "name": "compression", - "value": "Zip (1 scanline)" - }, - { - "type": "bool", - "name": "autocrop", - "value": true - }, - { - "type": "color_gui", - "name": "tile_color", - "value": [ - 186, - 35, - 35, - 255 - ] - }, - { - "type": "text", - "name": "channels", - "value": "rgb" - }, - { - "type": "text", - "name": "colorspace", - "value": "scene_linear" - }, - { - "type": "bool", - "name": "create_directories", - "value": true - } - ] - }, - { - "plugins": [ - "CreateWritePrerender" - ], - "nukeNodeClass": "Write", - "knobs": [ - { - "type": "text", - "name": "file_type", - "value": "exr" - }, - { - "type": "text", - "name": "datatype", - "value": "16 bit half" - }, - { - "type": "text", - "name": "compression", - "value": "Zip (1 scanline)" - }, - { - "type": "bool", - "name": "autocrop", - "value": true - }, - { - "type": "color_gui", - "name": "tile_color", - "value": [ - 171, - 171, - 10, - 255 - ] - }, - { - "type": "text", - "name": "channels", - "value": "rgb" - }, - { - "type": "text", - "name": "colorspace", - "value": "scene_linear" - }, - { - "type": "bool", - "name": "create_directories", - "value": true - } - ] - }, - { - "plugins": [ - "CreateWriteImage" - ], - "nukeNodeClass": "Write", - "knobs": [ - { - "type": "text", - "name": "file_type", - "value": "tiff" - }, - { - "type": "text", - "name": "datatype", - "value": "16 bit" - }, - { - "type": "text", - "name": "compression", - "value": "Deflate" - }, - { - "type": "color_gui", - "name": "tile_color", - "value": [ - 56, - 162, - 7, - 255 - ] - }, - { - "type": "text", - "name": "channels", - "value": "rgb" - }, - { - "type": "text", - "name": "colorspace", - "value": "texture_paint" - }, - { - "type": "bool", - "name": "create_directories", - "value": true - } - ] - } - ], - "overrideNodes": [] - }, - "regexInputs": { - "inputs": [ - { - "regex": "(beauty).*(?=.exr)", - 
"colorspace": "scene_linear" - } - ] - } - }, - "nuke-dirmap": { - "enabled": false, - "paths": { - "source-path": [], - "destination-path": [] - } - }, - "scriptsmenu": { - "name": "OpenPype Tools", - "definition": [ - { - "type": "action", - "sourcetype": "python", - "title": "OpenPype Docs", - "command": "import webbrowser;webbrowser.open(url='https://openpype.io/docs/artist_hosts_nuke_tut')", - "tooltip": "Open the OpenPype Nuke user doc page" - }, - { - "type": "action", - "sourcetype": "python", - "title": "Set Frame Start (Read Node)", - "command": "from ayon_core.hosts.nuke.startup.frame_setting_for_read_nodes import main;main();", - "tooltip": "Set frame start for read node(s)" - }, - { - "type": "action", - "sourcetype": "python", - "title": "Set non publish output for Write Node", - "command": "from ayon_core.hosts.nuke.startup.custom_write_node import main;main();", - "tooltip": "Open the OpenPype Nuke user doc page" - } - ] - }, - "gizmo": [ - { - "toolbar_menu_name": "OpenPype Gizmo", - "gizmo_source_dir": { - "windows": [], - "darwin": [], - "linux": [] - }, - "toolbar_icon_path": { - "windows": "", - "darwin": "", - "linux": "" - }, - "gizmo_definition": [ - { - "gizmo_toolbar_path": "/path/to/menu", - "sub_gizmo_list": [ - { - "sourcetype": "python", - "title": "Gizmo Note", - "command": "nuke.nodes.StickyNote(label='You can create your own toolbar menu in the Nuke GizmoMenu of OpenPype')", - "icon": "", - "shortcut": "" - } - ] - } - ] - } - ], - "create": { - "CreateWriteRender": { - "temp_rendering_path_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}", - "default_variants": [ - "Main", - "Mask" - ], - "instance_attributes": [ - "reviewable", - "farm_rendering" - ], - "prenodes": { - "Reformat01": { - "nodeclass": "Reformat", - "dependent": "", - "knobs": [ - { - "type": "text", - "name": "resize", - "value": "none" - }, - { - "type": "bool", - "name": "black_outside", - "value": true - } - ] - } - } - }, - "CreateWritePrerender": { - "temp_rendering_path_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}", - "default_variants": [ - "Key01", - "Bg01", - "Fg01", - "Branch01", - "Part01" - ], - "instance_attributes": [ - "farm_rendering", - "use_range_limit" - ], - "prenodes": {} - }, - "CreateWriteImage": { - "temp_rendering_path_template": "{work}/renders/nuke/{subset}/{subset}.{ext}", - "default_variants": [ - "StillFrame", - "MPFrame", - "LayoutFrame" - ], - "instance_attributes": [ - "use_range_limit" - ], - "prenodes": { - "FrameHold01": { - "nodeclass": "FrameHold", - "dependent": "", - "knobs": [ - { - "type": "expression", - "name": "first_frame", - "expression": "parent.first" - } - ] - } - } - } - }, - "publish": { - "CollectInstanceData": { - "sync_workfile_version_on_families": [ - "nukenodes", - "camera", - "gizmo", - "source", - "render", - "write" - ] - }, - "ValidateCorrectAssetContext": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateContainers": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateKnobs": { - "enabled": false, - "knobs": { - "render": { - "review": true - } - } - }, - "ValidateOutputResolution": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateBackdrop": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateGizmo": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateScriptAttributes": { - "enabled": true, - "optional": true, - "active": true - }, - "ExtractReviewData": { - "enabled": false - }, - 
"ExtractReviewDataLut": { - "enabled": false - }, - "ExtractReviewDataMov": { - "enabled": true, - "viewer_lut_raw": false, - "outputs": { - "baking": { - "filter": { - "task_types": [], - "families": [], - "subsets": [] - }, - "read_raw": false, - "viewer_process_override": "", - "bake_viewer_process": true, - "bake_viewer_input_process": true, - "reformat_nodes_config": { - "enabled": false, - "reposition_nodes": [ - { - "node_class": "Reformat", - "knobs": [ - { - "type": "text", - "name": "type", - "value": "to format" - }, - { - "type": "text", - "name": "format", - "value": "HD_1080" - }, - { - "type": "text", - "name": "filter", - "value": "Lanczos6" - }, - { - "type": "bool", - "name": "black_outside", - "value": true - }, - { - "type": "bool", - "name": "pbb", - "value": false - } - ] - } - ] - }, - "extension": "mov", - "add_custom_tags": [] - } - } - }, - "ExtractReviewIntermediates": { - "enabled": true, - "viewer_lut_raw": false, - "outputs": { - "baking": { - "filter": { - "task_types": [], - "families": [], - "subsets": [] - }, - "read_raw": false, - "viewer_process_override": "", - "bake_viewer_process": true, - "bake_viewer_input_process": true, - "reformat_nodes_config": { - "enabled": false, - "reposition_nodes": [ - { - "node_class": "Reformat", - "knobs": [ - { - "type": "text", - "name": "type", - "value": "to format" - }, - { - "type": "text", - "name": "format", - "value": "HD_1080" - }, - { - "type": "text", - "name": "filter", - "value": "Lanczos6" - }, - { - "type": "bool", - "name": "black_outside", - "value": true - }, - { - "type": "bool", - "name": "pbb", - "value": false - } - ] - } - ] - }, - "extension": "mov", - "add_custom_tags": [] - } - } - }, - "ExtractSlateFrame": { - "viewer_lut_raw": false, - "key_value_mapping": { - "f_submission_note": [ - true, - "{comment}" - ], - "f_submitting_for": [ - true, - "{intent[value]}" - ], - "f_vfx_scope_of_work": [ - false, - "" - ] - } - }, - "IncrementScriptVersion": { - "enabled": true, - "optional": true, - "active": true - } - }, - "load": { - "LoadImage": { - "enabled": true, - "_representations": [], - "node_name_template": "{class_name}_{ext}" - }, - "LoadClip": { - "enabled": true, - "_representations": [], - "node_name_template": "{class_name}_{ext}", - "options_defaults": { - "start_at_workfile": true, - "add_retime": true - } - } - }, - "workfile_builder": { - "create_first_version": false, - "custom_templates": [], - "builder_on_start": false, - "profiles": [] - }, - "templated_workfile_build": { - "profiles": [] - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/photoshop.json b/client/ayon_core/settings/defaults/project_settings/photoshop.json deleted file mode 100644 index 71f94f5bfc..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/photoshop.json +++ /dev/null @@ -1,91 +0,0 @@ -{ - "imageio": { - "activate_host_color_management": true, - "remapping": { - "rules": [] - }, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - } - }, - "create": { - "ImageCreator": { - "enabled": true, - "active_on_create": true, - "mark_for_review": false, - "default_variants": [ - "Main" - ] - }, - "AutoImageCreator": { - "enabled": false, - "active_on_create": true, - "mark_for_review": false, - "default_variant": "" - }, - "ReviewCreator": { - "enabled": true, - "active_on_create": true, - "default_variant": "" - }, - "WorkfileCreator": { - "enabled": true, - "active_on_create": true, - 
"default_variant": "Main" - } - }, - "publish": { - "CollectColorCodedInstances": { - "enabled": true, - "create_flatten_image": "no", - "flatten_subset_template": "", - "color_code_mapping": [] - }, - "CollectReview": { - "enabled": true - }, - "CollectVersion": { - "enabled": false - }, - "ValidateContainers": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateNaming": { - "invalid_chars": "[ \\\\/+\\*\\?\\(\\)\\[\\]\\{\\}:,;]", - "replace_char": "_" - }, - "ExtractImage": { - "formats": [ - "png", - "jpg" - ] - }, - "ExtractReview": { - "make_image_sequence": false, - "max_downscale_size": 8192, - "jpg_options": { - "tags": [ - "review", - "ftrackreview" - ] - }, - "mov_options": { - "tags": [ - "review", - "ftrackreview" - ] - } - } - }, - "workfile_builder": { - "create_first_version": false, - "custom_templates": [] - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/resolve.json b/client/ayon_core/settings/defaults/project_settings/resolve.json deleted file mode 100644 index 95b3cc66b3..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/resolve.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "launch_openpype_menu_on_start": false, - "imageio": { - "activate_host_color_management": true, - "remapping": { - "rules": [] - }, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - } - }, - "create": { - "CreateShotClip": { - "hierarchy": "{folder}/{sequence}", - "clipRename": true, - "clipName": "{track}{sequence}{shot}", - "countFrom": 10, - "countSteps": 10, - "folder": "shots", - "episode": "ep01", - "sequence": "sq01", - "track": "{_track_}", - "shot": "sh###", - "vSyncOn": false, - "workfileFrameStart": 1001, - "handleStart": 10, - "handleEnd": 10 - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/royalrender.json b/client/ayon_core/settings/defaults/project_settings/royalrender.json deleted file mode 100644 index 14e36058aa..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/royalrender.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "rr_paths": [ - "default" - ], - "publish": { - "CollectSequencesFromJob": { - "review": true - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/shotgrid.json b/client/ayon_core/settings/defaults/project_settings/shotgrid.json deleted file mode 100644 index 83b6f69074..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/shotgrid.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "shotgrid_project_id": 0, - "shotgrid_server": "", - "event": { - "enabled": false - }, - "fields": { - "asset": { - "type": "sg_asset_type" - }, - "sequence": { - "episode_link": "episode" - }, - "shot": { - "episode_link": "sg_episode", - "sequence_link": "sg_sequence" - }, - "task": { - "step": "step" - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/slack.json b/client/ayon_core/settings/defaults/project_settings/slack.json deleted file mode 100644 index 910f099d04..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/slack.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "token": "", - "publish": { - "CollectSlackFamilies": { - "enabled": true, - "optional": true, - "profiles": [ - { - "families": [], - "hosts": [], - "task_types": [], - "tasks": [], - "subsets": [], - "review_upload_limit": 50.0, - "channel_messages": [] - } - ] - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/standalonepublisher.json 
b/client/ayon_core/settings/defaults/project_settings/standalonepublisher.json deleted file mode 100644 index 44982133eb..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/standalonepublisher.json +++ /dev/null @@ -1,299 +0,0 @@ -{ - "create": { - "create_workfile": { - "name": "workfile", - "label": "Workfile", - "family": "workfile", - "icon": "cube", - "defaults": [ - "Main" - ], - "help": "Working scene backup" - }, - "create_model": { - "name": "model", - "label": "Model", - "family": "model", - "icon": "cube", - "defaults": [ - "Main" - ], - "help": "Polygonal static geometry" - }, - "create_rig": { - "name": "rig", - "label": "Rig", - "family": "rig", - "icon": "wheelchair", - "defaults": [ - "Main", - "Cloth" - ], - "help": "Artist-friendly rig with controls" - }, - "create_pointcache": { - "name": "pointcache", - "label": "Pointcache", - "family": "pointcache", - "icon": "gears", - "defaults": [ - "Main" - ], - "help": "Alembic pointcache for animated data" - }, - "create_plate": { - "name": "plate", - "label": "Plate", - "family": "plate", - "icon": "camera", - "defaults": [ - "Main", - "BG", - "Animatic", - "Reference", - "Offline" - ], - "help": "Footage for composting or reference" - }, - "create_camera": { - "name": "camera", - "label": "Camera", - "family": "camera", - "icon": "camera", - "defaults": [ - "Main" - ], - "help": "video-camera" - }, - "create_editorial": { - "name": "editorial", - "label": "Editorial", - "family": "editorial", - "icon": "image", - "defaults": [ - "Main" - ], - "help": "Editorial files to generate shots." - }, - "create_image": { - "name": "image", - "label": "Image file", - "family": "image", - "icon": "image", - "defaults": [ - "Reference", - "Texture", - "ConceptArt", - "MattePaint" - ], - "help": "Holder for all kinds of image data" - }, - "create_matchmove": { - "name": "matchmove", - "label": "Matchmove Scripts", - "family": "matchmove", - "icon": "empire", - "defaults": [ - "Camera", - "Object", - "Mocap" - ], - "help": "Script exported from matchmoving application" - }, - "create_render": { - "name": "render", - "label": "Render", - "family": "render", - "icon": "image", - "defaults": [ - "Animation", - "Lighting", - "Lookdev", - "Compositing" - ], - "help": "Rendered images or video files" - }, - "create_mov_batch": { - "name": "mov_batch", - "label": "Batch Mov", - "family": "render_mov_batch", - "icon": "image", - "defaults": [ - "Main" - ], - "help": "Process multiple Mov files and publish them for layout and comp." 
- }, - "create_texture_batch": { - "name": "texture_batch", - "label": "Texture Batch", - "family": "texture_batch", - "icon": "image", - "defaults": [ - "Main" - ], - "help": "Texture files with UDIM together with worfile" - }, - "create_vdb": { - "name": "vdb", - "label": "VDB Volumetric Data", - "family": "vdbcache", - "icon": "cloud", - "defaults": [], - "help": "Hierarchical data structure for the efficient storage and manipulation of sparse volumetric data discretized on three-dimensional grids" - }, - "__dynamic_keys_labels__": { - "create_workfile": "Workfile", - "create_model": "Model", - "create_rig": "Rig", - "create_pointcache": "Pointcache", - "create_plate": "Plate", - "create_camera": "Camera", - "create_editorial": "Editorial", - "create_image": "Image", - "create_matchmove": "Matchmove", - "create_render": "Render", - "create_mov_batch": "Batch Mov", - "create_texture_batch": "Batch Texture", - "create_simple_unreal_texture": "Simple Unreal Texture", - "create_vdb": "VDB Cache" - } - }, - "publish": { - "CollectTextures": { - "enabled": true, - "active": true, - "main_workfile_extensions": [ - "mra" - ], - "other_workfile_extensions": [ - "spp", - "psd" - ], - "texture_extensions": [ - "exr", - "dpx", - "jpg", - "jpeg", - "png", - "tiff", - "tga", - "gif", - "svg" - ], - "workfile_families": [], - "texture_families": [], - "color_space": [ - "sRGB", - "Raw", - "ACEScg" - ], - "input_naming_patterns": { - "workfile": [ - "^([^.]+)(_[^_.]*)?_v([0-9]{3,}).+" - ], - "textures": [ - "^([^_.]+)_([^_.]+)_v([0-9]{3,})_([^_.]+)_({color_space})_(1[0-9]{3}).+" - ] - }, - "input_naming_groups": { - "workfile": [ - "asset", - "filler", - "version" - ], - "textures": [ - "asset", - "shader", - "version", - "channel", - "color_space", - "udim" - ] - }, - "workfile_subset_template": "textures{Subset}Workfile", - "texture_subset_template": "textures{Subset}_{Shader}_{Channel}" - }, - "ValidateSceneSettings": { - "enabled": true, - "optional": true, - "active": true, - "check_extensions": [ - "exr", - "dpx", - "jpg", - "jpeg", - "png", - "tiff", - "tga", - "gif", - "svg" - ], - "families": [ - "render" - ], - "skip_timelines_check": [] - }, - "ExtractThumbnailSP": { - "ffmpeg_args": { - "input": [ - "-apply_trc gamma22" - ], - "output": [] - } - }, - "CollectEditorial": { - "source_dir": "", - "extensions": [ - "mov", - "mp4" - ] - }, - "CollectHierarchyInstance": { - "shot_rename": true, - "shot_rename_template": "{project[code]}_{_sequence_}_{_shot_}", - "shot_rename_search_patterns": { - "_sequence_": "(sc\\d{3})", - "_shot_": "(sh\\d{3})" - }, - "shot_add_hierarchy": { - "enabled": true, - "parents_path": "{project}/{folder}/{sequence}", - "parents": { - "project": "{project[name]}", - "sequence": "{_sequence_}", - "folder": "shots" - } - }, - "shot_add_tasks": {} - }, - "CollectInstances": { - "custom_start_frame": 0, - "timeline_frame_start": 900000, - "timeline_frame_offset": 0, - "subsets": { - "referenceMain": { - "family": "review", - "families": [ - "clip" - ], - "extensions": [ - "mp4" - ], - "version": 0, - "keepSequence": false - }, - "audioMain": { - "family": "audio", - "families": [ - "clip" - ], - "extensions": [ - "wav" - ], - "version": 0, - "keepSequence": false - } - } - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/substancepainter.json b/client/ayon_core/settings/defaults/project_settings/substancepainter.json deleted file mode 100644 index 2f9344d435..0000000000 --- 
a/client/ayon_core/settings/defaults/project_settings/substancepainter.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
-    "imageio": {
-        "activate_host_color_management": true,
-        "ocio_config": {
-            "override_global_config": false,
-            "filepath": []
-        },
-        "file_rules": {
-            "activate_host_rules": false,
-            "rules": {}
-        }
-    },
-    "shelves": {}
-}
diff --git a/client/ayon_core/settings/defaults/project_settings/traypublisher.json b/client/ayon_core/settings/defaults/project_settings/traypublisher.json
deleted file mode 100644
index 7d2f358cb2..0000000000
--- a/client/ayon_core/settings/defaults/project_settings/traypublisher.json
+++ /dev/null
@@ -1,352 +0,0 @@
-{
-    "imageio": {
-        "activate_host_color_management": true,
-        "ocio_config": {
-            "override_global_config": false,
-            "filepath": []
-        },
-        "file_rules": {
-            "activate_host_rules": false,
-            "rules": {}
-        }
-    },
-    "simple_creators": [
-        {
-            "family": "workfile",
-            "identifier": "",
-            "label": "Workfile",
-            "icon": "fa.file",
-            "default_variants": [
-                "Main"
-            ],
-            "description": "Backup of a working scene",
-            "detailed_description": "Workfiles are full scenes from any application that are directly edited by artists. They represent a state of work on a task at a given point and are usually not directly referenced into other scenes.",
-            "allow_sequences": false,
-            "allow_multiple_items": false,
-            "allow_version_control": false,
-            "extensions": [
-                ".ma",
-                ".mb",
-                ".nk",
-                ".hrox",
-                ".hip",
-                ".hiplc",
-                ".hipnc",
-                ".blend",
-                ".scn",
-                ".tvpp",
-                ".comp",
-                ".zip",
-                ".prproj",
-                ".drp",
-                ".psd",
-                ".psb",
-                ".aep"
-            ]
-        },
-        {
-            "family": "model",
-            "identifier": "",
-            "label": "Model",
-            "icon": "fa.cubes",
-            "default_variants": [
-                "Main",
-                "Proxy",
-                "Sculpt"
-            ],
-            "description": "Clean models",
-            "detailed_description": "Models should only contain geometry data, without any extras like cameras, locators or bones.\n\nKeep in mind that models published from tray publisher are not validated for correctness. ",
-            "allow_sequences": false,
-            "allow_multiple_items": true,
-            "allow_version_control": false,
-            "extensions": [
-                ".ma",
-                ".mb",
-                ".obj",
-                ".abc",
-                ".fbx",
-                ".bgeo",
-                ".bgeogz",
-                ".bgeosc",
-                ".usd",
-                ".blend"
-            ]
-        },
-        {
-            "family": "pointcache",
-            "identifier": "",
-            "label": "Pointcache",
-            "icon": "fa.gears",
-            "default_variants": [
-                "Main"
-            ],
-            "description": "Geometry Caches",
-            "detailed_description": "Alembic or bgeo cache of animated data",
-            "allow_sequences": true,
-            "allow_multiple_items": true,
-            "allow_version_control": false,
-            "extensions": [
-                ".abc",
-                ".bgeo",
-                ".bgeogz",
-                ".bgeosc"
-            ]
-        },
-        {
-            "family": "plate",
-            "identifier": "",
-            "label": "Plate",
-            "icon": "mdi.camera-image",
-            "default_variants": [
-                "Main",
-                "BG",
-                "Animatic",
-                "Reference",
-                "Offline"
-            ],
-            "description": "Footage Plates",
-            "detailed_description": "Any type of image seqeuence coming from outside of the studio.
Usually camera footage, but could also be animatics used for reference.", - "allow_sequences": true, - "allow_multiple_items": true, - "allow_version_control": false, - "extensions": [ - ".exr", - ".png", - ".dpx", - ".jpg", - ".tiff", - ".tif", - ".mov", - ".mp4", - ".avi" - ] - }, - { - "family": "render", - "identifier": "", - "label": "Render", - "icon": "mdi.folder-multiple-image", - "default_variants": [], - "description": "Rendered images or video", - "detailed_description": "Sequence or single file renders", - "allow_sequences": true, - "allow_multiple_items": true, - "allow_version_control": false, - "extensions": [ - ".exr", - ".png", - ".dpx", - ".jpg", - ".jpeg", - ".tiff", - ".tif", - ".mov", - ".mp4", - ".avi" - ] - }, - { - "family": "camera", - "identifier": "", - "label": "Camera", - "icon": "fa.video-camera", - "default_variants": [], - "description": "3d Camera", - "detailed_description": "Ideally this should be only camera itself with baked animation, however, it can technically also include helper geometry.", - "allow_sequences": false, - "allow_multiple_items": true, - "allow_version_control": false, - "extensions": [ - ".abc", - ".ma", - ".hip", - ".blend", - ".fbx", - ".usd" - ] - }, - { - "family": "image", - "identifier": "", - "label": "Image", - "icon": "fa.image", - "default_variants": [ - "Reference", - "Texture", - "Concept", - "Background" - ], - "description": "Single image", - "detailed_description": "Any image data can be published as image family. References, textures, concept art, matte paints. This is a fallback 2d family for everything that doesn't fit more specific family.", - "allow_sequences": false, - "allow_multiple_items": true, - "allow_version_control": false, - "extensions": [ - ".exr", - ".jpg", - ".jpeg", - ".dpx", - ".bmp", - ".tif", - ".tiff", - ".png", - ".psb", - ".psd" - ] - }, - { - "family": "vdb", - "identifier": "", - "label": "VDB Volumes", - "icon": "fa.cloud", - "default_variants": [], - "description": "Sparse volumetric data", - "detailed_description": "Hierarchical data structure for the efficient storage and manipulation of sparse volumetric data discretized on three-dimensional grids", - "allow_sequences": true, - "allow_multiple_items": true, - "allow_version_control": false, - "extensions": [ - ".vdb" - ] - }, - { - "family": "matchmove", - "identifier": "", - "label": "Matchmove", - "icon": "fa.empire", - "default_variants": [ - "Camera", - "Object", - "Mocap" - ], - "description": "Matchmoving script", - "detailed_description": "Script exported from matchmoving application to be later processed into a tracked camera with additional data", - "allow_sequences": false, - "allow_multiple_items": true, - "allow_version_control": false, - "extensions": [] - }, - { - "family": "rig", - "identifier": "", - "label": "Rig", - "icon": "fa.wheelchair", - "default_variants": [], - "description": "CG rig file", - "detailed_description": "CG rigged character or prop. 
Rig should be clean of any extra data and directly loadable into it's respective application\t", - "allow_sequences": false, - "allow_multiple_items": false, - "allow_version_control": false, - "extensions": [ - ".ma", - ".blend", - ".hip", - ".hda" - ] - }, - { - "family": "audio", - "identifier": "", - "label": "Audio ", - "icon": "fa5s.file-audio", - "default_variants": [ - "Main" - ], - "description": "Audio product", - "detailed_description": "Audio files for review or final delivery", - "allow_sequences": false, - "allow_multiple_items": false, - "allow_version_control": false, - "extensions": [ - ".wav" - ] - } - ], - "editorial_creators": { - "editorial_simple": { - "default_variants": [ - "Main" - ], - "clip_name_tokenizer": { - "_sequence_": "(sc\\d{3})", - "_shot_": "(sh\\d{3})" - }, - "shot_rename": { - "enabled": true, - "shot_rename_template": "{project[code]}_{_sequence_}_{_shot_}" - }, - "shot_hierarchy": { - "enabled": true, - "parents_path": "{project}/{folder}/{sequence}", - "parents": [ - { - "type": "Project", - "name": "project", - "value": "{project[name]}" - }, - { - "type": "Folder", - "name": "folder", - "value": "shots" - }, - { - "type": "Sequence", - "name": "sequence", - "value": "{_sequence_}" - } - ] - }, - "shot_add_tasks": {}, - "family_presets": [ - { - "family": "review", - "variant": "Reference", - "review": true, - "output_file_type": ".mp4" - }, - { - "family": "plate", - "variant": "", - "review": false, - "output_file_type": ".mov" - }, - { - "family": "audio", - "variant": "", - "review": false, - "output_file_type": ".wav" - } - ] - } - }, - "create": { - "BatchMovieCreator": { - "default_variants": [ - "Main" - ], - "default_tasks": [ - "Compositing" - ], - "extensions": [ - ".mov" - ] - } - }, - "publish": { - "CollectSequenceFrameData": { - "enabled": true, - "optional": true, - "active": false - }, - "ValidateFrameRange": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateExistingVersion": { - "enabled": true, - "optional": true, - "active": true - } - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/tvpaint.json b/client/ayon_core/settings/defaults/project_settings/tvpaint.json deleted file mode 100644 index d03b8b7227..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/tvpaint.json +++ /dev/null @@ -1,111 +0,0 @@ -{ - "imageio": { - "activate_host_color_management": true, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - } - }, - "stop_timer_on_application_exit": false, - "create": { - "create_workfile": { - "enabled": true, - "default_variant": "Main", - "default_variants": [] - }, - "create_review": { - "enabled": true, - "active_on_create": true, - "default_variant": "Main", - "default_variants": [] - }, - "create_render_scene": { - "enabled": true, - "active_on_create": false, - "mark_for_review": true, - "default_pass_name": "beauty", - "default_variant": "Main", - "default_variants": [] - }, - "create_render_layer": { - "mark_for_review": false, - "default_pass_name": "beauty", - "default_variant": "Main", - "default_variants": [] - }, - "create_render_pass": { - "mark_for_review": false, - "default_variant": "Main", - "default_variants": [] - }, - "auto_detect_render": { - "enabled": false, - "allow_group_rename": true, - "group_name_template": "L{group_index}", - "group_idx_offset": 10, - "group_idx_padding": 3 - } - }, - "publish": { - "CollectRenderInstances": { - 
"ignore_render_pass_transparency": false - }, - "ExtractSequence": { - "review_bg": [ - 255, - 255, - 255, - 255 - ] - }, - "ValidateProjectSettings": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateMarks": { - "enabled": true, - "optional": true, - "active": true - }, - "ValidateStartFrame": { - "enabled": false, - "optional": true, - "active": true - }, - "ValidateAssetName": { - "enabled": true, - "optional": true, - "active": true - }, - "ExtractConvertToEXR": { - "enabled": false, - "replace_pngs": true, - "exr_compression": "ZIP" - } - }, - "load": { - "LoadImage": { - "defaults": { - "stretch": true, - "timestretch": true, - "preload": true - } - }, - "ImportImage": { - "defaults": { - "stretch": true, - "timestretch": true, - "preload": true - } - } - }, - "workfile_builder": { - "create_first_version": false, - "custom_templates": [] - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/unreal.json b/client/ayon_core/settings/defaults/project_settings/unreal.json deleted file mode 100644 index 20e55c74f0..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/unreal.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "imageio": { - "activate_host_color_management": true, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - } - }, - "level_sequences_for_layouts": false, - "delete_unmatched_assets": false, - "render_config_path": "", - "preroll_frames": 0, - "render_format": "png", - "project_setup": { - "dev_mode": false - } -} diff --git a/client/ayon_core/settings/defaults/project_settings/webpublisher.json b/client/ayon_core/settings/defaults/project_settings/webpublisher.json deleted file mode 100644 index e451bcfc17..0000000000 --- a/client/ayon_core/settings/defaults/project_settings/webpublisher.json +++ /dev/null @@ -1,145 +0,0 @@ -{ - "imageio": { - "activate_host_color_management": true, - "ocio_config": { - "override_global_config": false, - "filepath": [] - }, - "file_rules": { - "activate_host_rules": false, - "rules": {} - } - }, - "timeout_profiles": [ - { - "hosts": [ - "photoshop" - ], - "task_types": [], - "timeout": 600 - } - ], - "publish": { - "CollectPublishedFiles": { - "sync_next_version": false, - "task_type_to_family": { - "Animation": [ - { - "is_sequence": false, - "extensions": [ - "tvp" - ], - "families": [], - "tags": [], - "result_family": "workfile" - }, - { - "is_sequence": true, - "extensions": [ - "png", - "exr", - "tiff", - "tif" - ], - "families": [ - "review" - ], - "tags": [ - "review" - ], - "result_family": "render" - } - ], - "Compositing": [ - { - "is_sequence": false, - "extensions": [ - "aep" - ], - "families": [], - "tags": [], - "result_family": "workfile" - }, - { - "is_sequence": true, - "extensions": [ - "png", - "exr", - "tiff", - "tif" - ], - "families": [ - "review" - ], - "tags": [ - "review" - ], - "result_family": "render" - } - ], - "Layout": [ - { - "is_sequence": false, - "extensions": [ - "psd" - ], - "families": [], - "tags": [], - "result_family": "workfile" - }, - { - "is_sequence": false, - "extensions": [ - "png", - "jpg", - "jpeg", - "tiff", - "tif" - ], - "families": [ - "review" - ], - "tags": [ - "review" - ], - "result_family": "image" - } - ], - "default_task_type": [ - { - "is_sequence": false, - "extensions": [ - "tvp", - "psd" - ], - "families": [], - "tags": [], - "result_family": "workfile" - }, - { - "is_sequence": true, - "extensions": [ - "png", - "exr", - "tiff", - 
"tif" - ], - "families": [ - "review" - ], - "tags": [ - "review" - ], - "result_family": "render" - } - ], - "__dynamic_keys_labels__": { - "default_task_type": "Default task type" - } - } - }, - "CollectTVPaintInstances": { - "layer_name_regex": "(?PL[0-9]{3}_\\w+)_(?P.+)" - } - } -} diff --git a/client/ayon_core/settings/defaults/system_settings/applications.json b/client/ayon_core/settings/defaults/system_settings/applications.json deleted file mode 100644 index a5283751e9..0000000000 --- a/client/ayon_core/settings/defaults/system_settings/applications.json +++ /dev/null @@ -1,1619 +0,0 @@ -{ - "maya": { - "enabled": true, - "label": "Maya", - "icon": "{}/app_icons/maya.png", - "host_name": "maya", - "environment": { - "MAYA_DISABLE_CLIC_IPM": "Yes", - "MAYA_DISABLE_CIP": "Yes", - "MAYA_DISABLE_CER": "Yes", - "PYMEL_SKIP_MEL_INIT": "Yes", - "LC_ALL": "C" - }, - "variants": { - "2024": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Autodesk\\Maya2024\\bin\\maya.exe" - ], - "darwin": [], - "linux": [ - "/usr/autodesk/maya2024/bin/maya" - ] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": { - "MAYA_VERSION": "2024" - } - }, - "2023": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Autodesk\\Maya2023\\bin\\maya.exe" - ], - "darwin": [], - "linux": [ - "/usr/autodesk/maya2023/bin/maya" - ] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": { - "MAYA_VERSION": "2023" - } - }, - "2022": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Autodesk\\Maya2022\\bin\\maya.exe" - ], - "darwin": [], - "linux": [ - "/usr/autodesk/maya2022/bin/maya" - ] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": { - "MAYA_VERSION": "2022" - } - } - } - }, - "mayapy": { - "enabled": true, - "label": "MayaPy", - "icon": "{}/app_icons/maya.png", - "host_name": "maya", - "environment": { - "MAYA_DISABLE_CLIC_IPM": "Yes", - "MAYA_DISABLE_CIP": "Yes", - "MAYA_DISABLE_CER": "Yes", - "PYMEL_SKIP_MEL_INIT": "Yes", - "LC_ALL": "C" - }, - "variants": { - "2024": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Autodesk\\Maya2024\\bin\\mayapy.exe" - ], - "darwin": [], - "linux": [ - "/usr/autodesk/maya2024/bin/mayapy" - ] - }, - "arguments": { - "windows": [ - "-I" - ], - "darwin": [], - "linux": [ - "-I" - ] - }, - "environment": {} - }, - "2023": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Autodesk\\Maya2023\\bin\\mayapy.exe" - ], - "darwin": [], - "linux": [ - "/usr/autodesk/maya2023/bin/mayapy" - ] - }, - "arguments": { - "windows": [ - "-I" - ], - "darwin": [], - "linux": [ - "-I" - ] - }, - "environment": {} - } - } - }, - "3dsmax": { - "enabled": true, - "label": "3ds max", - "icon": "{}/app_icons/3dsmax.png", - "host_name": "max", - "environment": {}, - "variants": { - "2023": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Autodesk\\3ds Max 2023\\3dsmax.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": { - "3DSMAX_VERSION": "2023" - } - } - } - }, - "flame": { - "enabled": true, - "label": "Flame", - "icon": "{}/app_icons/flame.png", - "host_name": "flame", - "environment": { - "FLAME_SCRIPT_DIRS": { - "windows": "", - "darwin": "", - "linux": "" - }, - "FLAME_WIRETAP_HOSTNAME": "", - 
"FLAME_WIRETAP_VOLUME": "stonefs", - "FLAME_WIRETAP_GROUP": "staff" - }, - "variants": { - "2021": { - "use_python_2": true, - "executables": { - "windows": [], - "darwin": [ - "/opt/Autodesk/flame_2021/bin/flame.app/Contents/MacOS/startApp" - ], - "linux": [ - "/opt/Autodesk/flame_2021/bin/startApplication" - ] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": { - "OPENPYPE_FLAME_PYTHON_EXEC": "/opt/Autodesk/python/2021/bin/python2.7", - "OPENPYPE_FLAME_PYTHONPATH": "/opt/Autodesk/flame_2021/python", - "OPENPYPE_WIRETAP_TOOLS": "/opt/Autodesk/wiretap/tools/2021" - } - }, - "2021_1": { - "use_python_2": true, - "executables": { - "windows": [], - "darwin": [ - "/opt/Autodesk/flame_2021.1/bin/flame.app/Contents/MacOS/startApp" - ], - "linux": [ - "/opt/Autodesk/flame_2021.1/bin/startApplication" - ] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": { - "OPENPYPE_FLAME_PYTHON_EXEC": "/opt/Autodesk/python/2021.1/bin/python2.7", - "OPENPYPE_FLAME_PYTHONPATH": "/opt/Autodesk/flame_2021.1/python", - "OPENPYPE_WIRETAP_TOOLS": "/opt/Autodesk/wiretap/tools/2021.1" - } - }, - "__dynamic_keys_labels__": { - "2021": "2021", - "2021_1": "2021.1" - } - } - }, - "nuke": { - "enabled": true, - "label": "Nuke", - "icon": "{}/app_icons/nuke.png", - "host_name": "nuke", - "environment": { - "NUKE_PATH": [ - "{NUKE_PATH}", - "{OPENPYPE_STUDIO_PLUGINS}/nuke" - ] - }, - "variants": { - "13-2": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke13.2v1\\Nuke13.2.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke13.2v1/Nuke13.2" - ] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "13-0": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke13.0v1\\Nuke13.0.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke13.0v1/Nuke13.0" - ] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "12-2": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke12.2v3\\Nuke12.2.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke12.2v3Nuke12.2" - ] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "12-0": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke12.0v1\\Nuke12.0.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke12.0v1/Nuke12.0" - ] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "11-3": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke11.3v1\\Nuke11.3.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke11.3v5/Nuke11.3" - ] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "11-2": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke11.2v2\\Nuke11.2.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "11-0": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke11.0v4\\Nuke11.0.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "__dynamic_keys_labels__": { - "13-2": "13.2", - "13-0": "13.0", - "12-2": "12.2", - "12-0": "12.0", - "11-3": "11.3", - "11-2": 
"11.2", - "11-0": "11.0" - } - } - }, - "nukeassist": { - "enabled": true, - "label": "Nuke Assist", - "icon": "{}/app_icons/nuke.png", - "host_name": "nuke", - "environment": { - "NUKE_PATH": [ - "{NUKE_PATH}", - "{OPENPYPE_STUDIO_PLUGINS}/nuke" - ] - }, - "variants": { - "13-2": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke13.2v1\\Nuke13.2.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke13.2v1/Nuke13.2" - ] - }, - "arguments": { - "windows": [ - "--nukeassist" - ], - "darwin": [ - "--nukeassist" - ], - "linux": [ - "--nukeassist" - ] - }, - "environment": {} - }, - "13-0": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke13.0v1\\Nuke13.0.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke13.0v1/Nuke13.0" - ] - }, - "arguments": { - "windows": [ - "--nukeassist" - ], - "darwin": [ - "--nukeassist" - ], - "linux": [ - "--nukeassist" - ] - }, - "environment": {} - }, - "12-2": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke12.2v3\\Nuke12.2.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke12.2v3Nuke12.2" - ] - }, - "arguments": { - "windows": [ - "--nukeassist" - ], - "darwin": [ - "--nukeassist" - ], - "linux": [ - "--nukeassist" - ] - }, - "environment": {} - }, - "12-0": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke12.0v1\\Nuke12.0.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke12.0v1/Nuke12.0" - ] - }, - "arguments": { - "windows": [ - "--nukeassist" - ], - "darwin": [ - "--nukeassist" - ], - "linux": [ - "--nukeassist" - ] - }, - "environment": {} - }, - "11-3": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke11.3v1\\Nuke11.3.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke11.3v5/Nuke11.3" - ] - }, - "arguments": { - "windows": [ - "--nukeassist" - ], - "darwin": [ - "--nukeassist" - ], - "linux": [ - "--nukeassist" - ] - }, - "environment": {} - }, - "11-2": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke11.2v2\\Nuke11.2.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [ - "--nukeassist" - ], - "darwin": [ - "--nukeassist" - ], - "linux": [ - "--nukeassist" - ] - }, - "environment": {} - }, - "__dynamic_keys_labels__": { - "13-2": "13.2", - "13-0": "13.0", - "12-2": "12.2", - "12-0": "12.0", - "11-3": "11.3", - "11-2": "11.2" - } - } - }, - "nukex": { - "enabled": true, - "label": "Nuke X", - "icon": "{}/app_icons/nukex.png", - "host_name": "nuke", - "environment": { - "NUKE_PATH": [ - "{NUKE_PATH}", - "{OPENPYPE_STUDIO_PLUGINS}/nuke" - ] - }, - "variants": { - "13-2": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke13.2v1\\Nuke13.2.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke13.2v1/Nuke13.2" - ] - }, - "arguments": { - "windows": [ - "--nukex" - ], - "darwin": [ - "--nukex" - ], - "linux": [ - "--nukex" - ] - }, - "environment": {} - }, - "13-0": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke13.0v1\\Nuke13.0.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke13.0v1/Nuke13.0" - ] - }, - "arguments": { - "windows": [ - "--nukex" - ], - "darwin": [ - "--nukex" - ], - "linux": [ - "--nukex" - ] - }, - "environment": {} - }, - "12-2": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke12.2v3\\Nuke12.2.exe" - ], - "darwin": [], - "linux": [ - 
"/usr/local/Nuke12.2v3Nuke12.2" - ] - }, - "arguments": { - "windows": [ - "--nukex" - ], - "darwin": [ - "--nukex" - ], - "linux": [ - "--nukex" - ] - }, - "environment": {} - }, - "12-0": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke12.0v1\\Nuke12.0.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke12.0v1/Nuke12.0" - ] - }, - "arguments": { - "windows": [ - "--nukex" - ], - "darwin": [ - "--nukex" - ], - "linux": [ - "--nukex" - ] - }, - "environment": {} - }, - "11-3": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke11.3v1\\Nuke11.3.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke11.3v5/Nuke11.3" - ] - }, - "arguments": { - "windows": [ - "--nukex" - ], - "darwin": [ - "--nukex" - ], - "linux": [ - "--nukex" - ] - }, - "environment": {} - }, - "11-2": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke11.2v2\\Nuke11.2.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [ - "--nukex" - ], - "darwin": [ - "--nukex" - ], - "linux": [ - "--nukex" - ] - }, - "environment": {} - }, - "__dynamic_keys_labels__": { - "13-2": "13.2", - "13-0": "13.0", - "12-2": "12.2", - "12-0": "12.0", - "11-3": "11.3", - "11-2": "11.2" - } - } - }, - "nukestudio": { - "enabled": true, - "label": "Nuke Studio", - "icon": "{}/app_icons/nukestudio.png", - "host_name": "hiero", - "environment": { - "WORKFILES_STARTUP": "0", - "TAG_ASSETBUILD_STARTUP": "0" - }, - "variants": { - "13-2": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke13.2v1\\Nuke13.2.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke13.2v1/Nuke13.2" - ] - }, - "arguments": { - "windows": [ - "--studio" - ], - "darwin": [ - "--studio" - ], - "linux": [ - "--studio" - ] - }, - "environment": {} - }, - "13-0": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke13.0v1\\Nuke13.0.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke13.0v1/Nuke13.0" - ] - }, - "arguments": { - "windows": [ - "--studio" - ], - "darwin": [ - "--studio" - ], - "linux": [ - "--studio" - ] - }, - "environment": {} - }, - "12-2": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke12.2v3\\Nuke12.2.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke12.2v3Nuke12.2" - ] - }, - "arguments": { - "windows": [ - "--studio" - ], - "darwin": [ - "--studio" - ], - "linux": [ - "--studio" - ] - }, - "environment": {} - }, - "12-0": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke12.0v1\\Nuke12.0.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke12.0v1/Nuke12.0" - ] - }, - "arguments": { - "windows": [ - "--studio" - ], - "darwin": [ - "--studio" - ], - "linux": [ - "--studio" - ] - }, - "environment": {} - }, - "11-3": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke11.3v1\\Nuke11.3.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke11.3v5/Nuke11.3" - ] - }, - "arguments": { - "windows": [ - "--studio" - ], - "darwin": [ - "--studio" - ], - "linux": [ - "--studio" - ] - }, - "environment": {} - }, - "11-2": { - "use_python_2": true, - "executables": { - "windows": [], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [ - "--studio" - ], - "darwin": [ - "--studio" - ], - "linux": [ - "--studio" - ] - }, - "environment": {} - }, - "__dynamic_keys_labels__": { - "13-2": "13.2", - "13-0": "13.0", - "12-2": "12.2", - 
"12-0": "12.0", - "11-3": "11.3", - "11-2": "11.2" - } - } - }, - "hiero": { - "enabled": true, - "label": "Hiero", - "icon": "{}/app_icons/hiero.png", - "host_name": "hiero", - "environment": { - "WORKFILES_STARTUP": "0", - "TAG_ASSETBUILD_STARTUP": "0" - }, - "variants": { - "13-2": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke13.2v1\\Nuke13.2.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke13.2v1/Nuke13.2" - ] - }, - "arguments": { - "windows": [ - "--hiero" - ], - "darwin": [ - "--hiero" - ], - "linux": [ - "--hiero" - ] - }, - "environment": {} - }, - "13-0": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke13.0v1\\Nuke13.0.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke13.0v1/Nuke13.0" - ] - }, - "arguments": { - "windows": [ - "--hiero" - ], - "darwin": [ - "--hiero" - ], - "linux": [ - "--hiero" - ] - }, - "environment": {} - }, - "12-2": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke12.2v3\\Nuke12.2.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke12.2v3Nuke12.2" - ] - }, - "arguments": { - "windows": [ - "--hiero" - ], - "darwin": [ - "--hiero" - ], - "linux": [ - "--hiero" - ] - }, - "environment": {} - }, - "12-0": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke12.0v1\\Nuke12.0.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke12.0v1/Nuke12.0" - ] - }, - "arguments": { - "windows": [ - "--hiero" - ], - "darwin": [ - "--hiero" - ], - "linux": [ - "--hiero" - ] - }, - "environment": {} - }, - "11-3": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke11.3v1\\Nuke11.3.exe" - ], - "darwin": [], - "linux": [ - "/usr/local/Nuke11.3v5/Nuke11.3" - ] - }, - "arguments": { - "windows": [ - "--hiero" - ], - "darwin": [ - "--hiero" - ], - "linux": [ - "--hiero" - ] - }, - "environment": {} - }, - "11-2": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Nuke11.2v2\\Nuke11.2.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [ - "--hiero" - ], - "darwin": [ - "--hiero" - ], - "linux": [ - "--hiero" - ] - }, - "environment": {} - }, - "__dynamic_keys_labels__": { - "13-2": "13.2", - "13-0": "13.0", - "12-2": "12.2", - "12-0": "12.0", - "11-3": "11.3", - "11-2": "11.2" - } - } - }, - "fusion": { - "enabled": true, - "label": "Fusion", - "icon": "{}/app_icons/fusion.png", - "host_name": "fusion", - "environment": { - "FUSION_PYTHON3_HOME": { - "windows": "{LOCALAPPDATA}/Programs/Python/Python36", - "darwin": "~/Library/Python/3.6/bin", - "linux": "/opt/Python/3.6/bin" - } - }, - "variants": { - "18": { - "executables": { - "windows": [ - "C:\\Program Files\\Blackmagic Design\\Fusion 18\\Fusion.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "17": { - "executables": { - "windows": [ - "C:\\Program Files\\Blackmagic Design\\Fusion 17\\Fusion.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "16": { - "executables": { - "windows": [ - "C:\\Program Files\\Blackmagic Design\\Fusion 16\\Fusion.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "9": { - "executables": { - "windows": [ - "C:\\Program Files\\Blackmagic Design\\Fusion 9\\Fusion.exe" - ], - 
"darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - } - } - }, - "resolve": { - "enabled": true, - "label": "Resolve", - "icon": "{}/app_icons/resolve.png", - "host_name": "resolve", - "environment": { - "RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR": [], - "RESOLVE_PYTHON3_HOME": { - "windows": "{LOCALAPPDATA}/Programs/Python/Python36", - "darwin": "/Library/Frameworks/Python.framework/Versions/3.6", - "linux": "/opt/Python/3.6" - } - }, - "variants": { - "stable": { - "enabled": true, - "variant_label": "stable", - "use_python_2": false, - "executables": { - "windows": [ - "C:/Program Files/Blackmagic Design/DaVinci Resolve/Resolve.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - } - } - }, - "houdini": { - "enabled": true, - "label": "Houdini", - "icon": "{}/app_icons/houdini.png", - "host_name": "houdini", - "environment": {}, - "variants": { - "18-5": { - "use_python_2": true, - "executables": { - "windows": [ - "C:\\Program Files\\Side Effects Software\\Houdini 18.5.499\\bin\\houdini.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "18": { - "use_python_2": true, - "executables": { - "windows": [], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "17": { - "use_python_2": true, - "executables": { - "windows": [], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "__dynamic_keys_labels__": { - "18-5": "18.5", - "18": "18", - "17": "17" - } - } - }, - "blender": { - "enabled": true, - "label": "Blender", - "icon": "{}/app_icons/blender.png", - "host_name": "blender", - "environment": {}, - "variants": { - "2-83": { - "executables": { - "windows": [ - "C:\\Program Files\\Blender Foundation\\Blender 2.83\\blender.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [ - "--python-use-system-env" - ], - "darwin": [ - "--python-use-system-env" - ], - "linux": [ - "--python-use-system-env" - ] - }, - "environment": {} - }, - "2-90": { - "executables": { - "windows": [ - "C:\\Program Files\\Blender Foundation\\Blender 2.90\\blender.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [ - "--python-use-system-env" - ], - "darwin": [ - "--python-use-system-env" - ], - "linux": [ - "--python-use-system-env" - ] - }, - "environment": {} - }, - "2-91": { - "executables": { - "windows": [ - "C:\\Program Files\\Blender Foundation\\Blender 2.91\\blender.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [ - "--python-use-system-env" - ], - "darwin": [ - "--python-use-system-env" - ], - "linux": [ - "--python-use-system-env" - ] - }, - "environment": {} - }, - "__dynamic_keys_labels__": { - "2-83": "2.83", - "2-90": "2.90", - "2-91": "2.91" - } - } - }, - "harmony": { - "enabled": true, - "label": "Harmony", - "icon": "{}/app_icons/harmony.png", - "host_name": "harmony", - "environment": { - "AVALON_HARMONY_WORKFILES_ON_LAUNCH": "1" - }, - "variants": { - "21": { - "executables": { - "windows": [ - "c:\\Program Files (x86)\\Toon Boom Animation\\Toon Boom Harmony 21 Premium\\win64\\bin\\HarmonyPremium.exe" - ], - "darwin": [ - "/Applications/Toon Boom Harmony 21 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium" - ], - "linux": [] 
- }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "20": { - "executables": { - "windows": [ - "c:\\Program Files (x86)\\Toon Boom Animation\\Toon Boom Harmony 20 Premium\\win64\\bin\\HarmonyPremium.exe" - ], - "darwin": [ - "/Applications/Toon Boom Harmony 20 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium" - ], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "17": { - "executables": { - "windows": [ - "c:\\Program Files (x86)\\Toon Boom Animation\\Toon Boom Harmony 17 Premium\\win64\\bin\\HarmonyPremium.exe" - ], - "darwin": [ - "/Applications/Toon Boom Harmony 17 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium" - ], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - } - } - }, - "tvpaint": { - "enabled": true, - "label": "TVPaint", - "icon": "{}/app_icons/tvpaint.png", - "host_name": "tvpaint", - "environment": {}, - "variants": { - "animation_11-64bits": { - "executables": { - "windows": [ - "C:\\Program Files\\TVPaint Developpement\\TVPaint Animation 11 (64bits)\\TVPaint Animation 11 (64bits).exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "animation_11-32bits": { - "executables": { - "windows": [ - "C:\\Program Files (x86)\\TVPaint Developpement\\TVPaint Animation 11 (32bits)\\TVPaint Animation 11 (32bits).exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "__dynamic_keys_labels__": { - "animation_11-64bits": "11 (64bits)", - "animation_11-32bits": "11 (32bits)" - } - } - }, - "photoshop": { - "enabled": true, - "label": "Photoshop", - "icon": "{}/app_icons/photoshop.png", - "host_name": "photoshop", - "environment": { - "AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH": "1", - "WORKFILES_SAVE_AS": "Yes" - }, - "variants": { - "2020": { - "executables": { - "windows": [ - "C:\\Program Files\\Adobe\\Adobe Photoshop 2020\\Photoshop.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "2021": { - "executables": { - "windows": [ - "C:\\Program Files\\Adobe\\Adobe Photoshop 2021\\Photoshop.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "2022": { - "executables": { - "windows": [ - "C:\\Program Files\\Adobe\\Adobe Photoshop 2022\\Photoshop.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - } - } - }, - "aftereffects": { - "enabled": true, - "label": "AfterEffects", - "icon": "{}/app_icons/aftereffects.png", - "host_name": "aftereffects", - "environment": { - "AVALON_AFTEREFFECTS_WORKFILES_ON_LAUNCH": "1", - "WORKFILES_SAVE_AS": "Yes" - }, - "variants": { - "2020": { - "executables": { - "windows": [ - "C:\\Program Files\\Adobe\\Adobe After Effects 2020\\Support Files\\AfterFX.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "2021": { - "executables": { - "windows": [ - "C:\\Program Files\\Adobe\\Adobe After Effects 2021\\Support Files\\AfterFX.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} 
- }, - "2022": { - "executables": { - "windows": [ - "C:\\Program Files\\Adobe\\Adobe After Effects 2022\\Support Files\\AfterFX.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": { - "MULTIPROCESS": "No" - } - } - } - }, - "celaction": { - "enabled": true, - "label": "CelAction 2D", - "icon": "app_icons/celaction.png", - "host_name": "celaction", - "environment": { - "CELACTION_TEMPLATE": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/celaction/celaction_template_scene.scn" - }, - "variants": { - "current": { - "enabled": true, - "variant_label": "Current", - "use_python_2": false, - "executables": { - "windows": [ - "C:/Program Files/CelAction/CelAction2D Studio/CelAction2D.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - } - } - }, - "substancepainter": { - "enabled": true, - "label": "Substance Painter", - "icon": "app_icons/substancepainter.png", - "host_name": "substancepainter", - "environment": {}, - "variants": { - "8-2-0": { - "executables": { - "windows": [ - "C:\\Program Files\\Adobe\\Adobe Substance 3D Painter\\Adobe Substance 3D Painter.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "__dynamic_keys_labels__": { - "8-2-0": "8.2.0" - } - } - }, - "unreal": { - "enabled": true, - "label": "Unreal Editor", - "icon": "{}/app_icons/ue4.png", - "host_name": "unreal", - "environment": { - "UE_PYTHONPATH": "{PYTHONPATH}" - }, - "variants": { - "5-0": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Epic Games\\UE_5.0\\Engine\\Binaries\\Win64\\UnrealEditor.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "5-1": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Epic Games\\UE_5.1\\Engine\\Binaries\\Win64\\UnrealEditor.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "__dynamic_keys_labels__": { - "5-1": "Unreal 5.1", - "5-0": "Unreal 5.0" - } - } - }, - "djvview": { - "enabled": true, - "label": "DJV View", - "icon": "{}/app_icons/djvView.png", - "host_name": "", - "environment": {}, - "variants": { - "1-1": { - "use_python_2": false, - "executables": { - "windows": [], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": {} - }, - "__dynamic_keys_labels__": { - "1-1": "1.1" - } - } - }, - "additional_apps": {} -} diff --git a/client/ayon_core/settings/defaults/system_settings/general.json b/client/ayon_core/settings/defaults/system_settings/general.json deleted file mode 100644 index 496c37cd4d..0000000000 --- a/client/ayon_core/settings/defaults/system_settings/general.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "studio_name": "Studio name", - "studio_code": "stu", - "admin_password": "", - "environment": {}, - "log_to_server": true, - "disk_mapping": { - "windows": [], - "linux": [], - "darwin": [] - }, - "local_env_white_list": [], - "openpype_path": { - "windows": [], - "darwin": [], - "linux": [] - }, - "local_openpype_path": { - "windows": "", - "darwin": "", - "linux": "" - }, - "production_version": "", - "staging_version": "", - "version_check_interval": 5 -} diff --git 
a/client/ayon_core/settings/defaults/system_settings/modules.json b/client/ayon_core/settings/defaults/system_settings/modules.json deleted file mode 100644 index 22daae3b34..0000000000 --- a/client/ayon_core/settings/defaults/system_settings/modules.json +++ /dev/null @@ -1,200 +0,0 @@ -{ - "addon_paths": { - "windows": [], - "darwin": [], - "linux": [] - }, - "avalon": { - "AVALON_TIMEOUT": 1000, - "AVALON_THUMBNAIL_ROOT": { - "windows": "", - "darwin": "", - "linux": "" - } - }, - "ftrack": { - "enabled": false, - "ftrack_server": "", - "ftrack_actions_path": { - "windows": [], - "darwin": [], - "linux": [] - }, - "ftrack_events_path": { - "windows": [], - "darwin": [], - "linux": [] - }, - "intent": { - "allow_empty_intent": true, - "empty_intent_label": "", - "items": { - "wip": "WIP", - "final": "Final", - "test": "Test" - }, - "default": "" - }, - "custom_attributes": { - "show": { - "avalon_auto_sync": { - "write_security_roles": [ - "API", - "Administrator" - ], - "read_security_roles": [ - "API", - "Administrator" - ] - }, - "library_project": { - "write_security_roles": [ - "API", - "Administrator" - ], - "read_security_roles": [ - "API", - "Administrator" - ] - }, - "applications": { - "write_security_roles": [ - "API", - "Administrator" - ], - "read_security_roles": [ - "API", - "Administrator" - ] - } - }, - "is_hierarchical": { - "tools_env": { - "write_security_roles": [ - "API", - "Administrator" - ], - "read_security_roles": [ - "API", - "Administrator" - ] - }, - "avalon_mongo_id": { - "write_security_roles": [ - "API", - "Administrator" - ], - "read_security_roles": [ - "API", - "Administrator" - ] - }, - "fps": { - "write_security_roles": [], - "read_security_roles": [] - }, - "frameStart": { - "write_security_roles": [], - "read_security_roles": [] - }, - "frameEnd": { - "write_security_roles": [], - "read_security_roles": [] - }, - "clipIn": { - "write_security_roles": [], - "read_security_roles": [] - }, - "clipOut": { - "write_security_roles": [], - "read_security_roles": [] - }, - "handleStart": { - "write_security_roles": [], - "read_security_roles": [] - }, - "handleEnd": { - "write_security_roles": [], - "read_security_roles": [] - }, - "resolutionWidth": { - "write_security_roles": [], - "read_security_roles": [] - }, - "resolutionHeight": { - "write_security_roles": [], - "read_security_roles": [] - }, - "pixelAspect": { - "write_security_roles": [], - "read_security_roles": [] - } - } - } - }, - "kitsu": { - "enabled": false, - "server": "" - }, - "shotgrid": { - "enabled": false, - "leecher_manager_url": "http://127.0.0.1:3000", - "leecher_backend_url": "http://127.0.0.1:8090", - "filter_projects_by_login": true, - "shotgrid_settings": {} - }, - "timers_manager": { - "enabled": true, - "auto_stop": true, - "full_time": 15.0, - "message_time": 0.5, - "disregard_publishing": false - }, - "clockify": { - "enabled": false, - "workspace_name": "" - }, - "sync_server": { - "enabled": false, - "sites": {} - }, - "deadline": { - "enabled": true, - "deadline_urls": { - "default": "http://127.0.0.1:8082" - } - }, - "royalrender": { - "enabled": false, - "rr_paths": { - "default": { - "windows": "C:\\RR8", - "darwin": "/Volumes/share/RR8", - "linux": "/mnt/studio/RR8" - } - } - }, - "log_viewer": { - "enabled": true - }, - "standalonepublish_tool": { - "enabled": false - }, - "project_manager": { - "enabled": true - }, - "slack": { - "enabled": false - }, - "job_queue": { - "server_url": "", - "jobs_root": { - "windows": "", - "darwin": "", - "linux": "" - } - }, - 
"asset_reporter": { - "enabled": false - } -} diff --git a/client/ayon_core/settings/defaults/system_settings/tools.json b/client/ayon_core/settings/defaults/system_settings/tools.json deleted file mode 100644 index 921e13af3a..0000000000 --- a/client/ayon_core/settings/defaults/system_settings/tools.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "tool_groups": { - "mtoa": { - "environment": { - "MTOA": "{STUDIO_SOFTWARE}/arnold/mtoa_{MAYA_VERSION}_{MTOA_VERSION}", - "MAYA_RENDER_DESC_PATH": "{MTOA}", - "MAYA_MODULE_PATH": "{MTOA}", - "ARNOLD_PLUGIN_PATH": "{MTOA}/shaders", - "MTOA_EXTENSIONS_PATH": { - "darwin": "{MTOA}/extensions", - "linux": "{MTOA}/extensions", - "windows": "{MTOA}/extensions" - }, - "MTOA_EXTENSIONS": { - "darwin": "{MTOA}/extensions", - "linux": "{MTOA}/extensions", - "windows": "{MTOA}/extensions" - }, - "DYLD_LIBRARY_PATH": { - "darwin": "{MTOA}/bin" - }, - "PATH": { - "windows": "{PATH};{MTOA}/bin" - } - }, - "variants": { - "3-2": { - "host_names": [], - "app_variants": [], - "environment": { - "MTOA_VERSION": "3.2" - } - }, - "3-1": { - "host_names": [], - "app_variants": [], - "environment": { - "MTOA_VERSION": "3.1" - } - }, - "__dynamic_keys_labels__": { - "3-2": "3.2", - "3-1": "3.1" - } - } - }, - "vray": { - "environment": {}, - "variants": {} - }, - "yeti": { - "environment": {}, - "variants": {} - }, - "renderman": { - "environment": {}, - "variants": { - "24-3-maya": { - "host_names": [ - "maya" - ], - "app_variants": [ - "maya/2022" - ], - "environment": { - "RFMTREE": { - "windows": "C:\\Program Files\\Pixar\\RenderManForMaya-24.3", - "darwin": "/Applications/Pixar/RenderManForMaya-24.3", - "linux": "/opt/pixar/RenderManForMaya-24.3" - }, - "RMANTREE": { - "windows": "C:\\Program Files\\Pixar\\RenderManProServer-24.3", - "darwin": "/Applications/Pixar/RenderManProServer-24.3", - "linux": "/opt/pixar/RenderManProServer-24.3" - } - } - }, - "__dynamic_keys_labels__": { - "24-3-maya": "24.3 RFM" - } - } - }, - "__dynamic_keys_labels__": { - "mtoa": "Autodesk Arnold", - "vray": "Chaos Group Vray", - "yeti": "Peregrine Labs Yeti", - "renderman": "Pixar Renderman" - } - } -} diff --git a/client/ayon_core/settings/lib.py b/client/ayon_core/settings/lib.py index beae376b7c..69525d5b86 100644 --- a/client/ayon_core/settings/lib.py +++ b/client/ayon_core/settings/lib.py @@ -1,273 +1,219 @@ import os import json import logging +import collections import copy +import time -from .constants import ( - M_OVERRIDDEN_KEY, - - METADATA_KEYS, - - SYSTEM_SETTINGS_KEY, - PROJECT_SETTINGS_KEY, - DEFAULT_PROJECT_KEY -) - -from .ayon_settings import ( - get_ayon_project_settings, - get_ayon_system_settings -) +from ayon_core.client import get_ayon_server_api_connection log = logging.getLogger(__name__) -# Py2 + Py3 json decode exception -JSON_EXC = getattr(json.decoder, "JSONDecodeError", ValueError) + +class CacheItem: + lifetime = 10 + + def __init__(self, value, outdate_time=None): + self._value = value + if outdate_time is None: + outdate_time = time.time() + self.lifetime + self._outdate_time = outdate_time + + @classmethod + def create_outdated(cls): + return cls({}, 0) + + def get_value(self): + return copy.deepcopy(self._value) + + def update_value(self, value): + self._value = value + self._outdate_time = time.time() + self.lifetime + + @property + def is_outdated(self): + return time.time() > self._outdate_time -# Path to default settings -DEFAULTS_DIR = os.path.join( - os.path.dirname(os.path.abspath(__file__)), - "defaults" -) +class _AyonSettingsCache: + use_bundles = 
None + variant = None + addon_versions = CacheItem.create_outdated() + studio_settings = CacheItem.create_outdated() + cache_by_project_name = collections.defaultdict( + CacheItem.create_outdated) -# Variable where cache of default settings are stored -_DEFAULT_SETTINGS = None + @classmethod + def _use_bundles(cls): + if _AyonSettingsCache.use_bundles is None: + con = get_ayon_server_api_connection() + major, minor, _, _, _ = con.get_server_version_tuple() + use_bundles = True + if (major, minor) < (0, 3): + use_bundles = False + _AyonSettingsCache.use_bundles = use_bundles + return _AyonSettingsCache.use_bundles + @classmethod + def _get_variant(cls): + if _AyonSettingsCache.variant is None: + from ayon_core.lib import is_staging_enabled, is_dev_mode_enabled -def clear_metadata_from_settings(values): - """Remove all metadata keys from loaded settings.""" - if isinstance(values, dict): - for key in tuple(values.keys()): - if key in METADATA_KEYS: - values.pop(key) + variant = "production" + if is_dev_mode_enabled(): + variant = cls._get_bundle_name() + elif is_staging_enabled(): + variant = "staging" + + # Cache variant + _AyonSettingsCache.variant = variant + + # Set the variant to global ayon api connection + con = get_ayon_server_api_connection() + con.set_default_settings_variant(variant) + return _AyonSettingsCache.variant + + @classmethod + def _get_bundle_name(cls): + return os.environ["AYON_BUNDLE_NAME"] + + @classmethod + def get_value_by_project(cls, project_name): + cache_item = _AyonSettingsCache.cache_by_project_name[project_name] + if cache_item.is_outdated: + con = get_ayon_server_api_connection() + if cls._use_bundles(): + value = con.get_addons_settings( + bundle_name=cls._get_bundle_name(), + project_name=project_name, + variant=cls._get_variant() + ) else: - clear_metadata_from_settings(values[key]) - elif isinstance(values, list): - for item in values: - clear_metadata_from_settings(item) + value = con.get_addons_settings(project_name) + cache_item.update_value(value) + return cache_item.get_value() - -def get_local_settings(): - # TODO implement ayon implementation - return {} - - -def load_openpype_default_settings(): - """Load openpype default settings.""" - return load_jsons_from_dir(DEFAULTS_DIR) - - -def reset_default_settings(): - """Reset cache of default settings. Can't be used now.""" - global _DEFAULT_SETTINGS - _DEFAULT_SETTINGS = None - - -def _get_default_settings(): - return load_openpype_default_settings() - - -def get_default_settings(): - """Get default settings. - - Todo: - Cache loaded defaults. - - Returns: - dict: Loaded default settings. 
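
CacheItem introduced above is the building block of the new settings cache: it stores a value together with an expiry timestamp (time.time() + lifetime, 10 seconds by default), get_value() hands out deep copies so callers cannot mutate the cache, and create_outdated() produces an item that is stale from the start so the first access forces a refresh. A minimal, self-contained sketch of that refresh pattern; the class body mirrors the one added to settings/lib.py and fetch_settings() is a stand-in for the server query:

import copy
import time


class CacheItem:
    lifetime = 10  # seconds, same default as in settings/lib.py above

    def __init__(self, value, outdate_time=None):
        self._value = value
        if outdate_time is None:
            outdate_time = time.time() + self.lifetime
        self._outdate_time = outdate_time

    @classmethod
    def create_outdated(cls):
        # Expired from the start -> the first 'is_outdated' check is True
        return cls({}, 0)

    def get_value(self):
        # Deep copy so cached data cannot be changed in place by callers
        return copy.deepcopy(self._value)

    def update_value(self, value):
        self._value = value
        self._outdate_time = time.time() + self.lifetime

    @property
    def is_outdated(self):
        return time.time() > self._outdate_time


def fetch_settings():
    # Stand-in for the 'get_addons_settings' server call used above
    return {"core": {"studio_name": "Example"}}


cache = CacheItem.create_outdated()
if cache.is_outdated:              # True on first use
    cache.update_value(fetch_settings())
print(cache.get_value())           # fresh copy of the cached settings
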
- """ - global _DEFAULT_SETTINGS - if _DEFAULT_SETTINGS is None: - _DEFAULT_SETTINGS = _get_default_settings() - return copy.deepcopy(_DEFAULT_SETTINGS) - - -def load_json_file(fpath): - # Load json data - try: - with open(fpath, "r") as opened_file: - return json.load(opened_file) - - except JSON_EXC: - log.warning( - "File has invalid json format \"{}\"".format(fpath), - exc_info=True + @classmethod + def _get_addon_versions_from_bundle(cls): + con = get_ayon_server_api_connection() + expected_bundle = cls._get_bundle_name() + bundles = con.get_bundles()["bundles"] + bundle = next( + ( + bundle + for bundle in bundles + if bundle["name"] == expected_bundle + ), + None ) - return {} + if bundle is not None: + return bundle["addons"] + return {} + @classmethod + def get_addon_versions(cls): + cache_item = _AyonSettingsCache.addon_versions + if cache_item.is_outdated: + if cls._use_bundles(): + addons = cls._get_addon_versions_from_bundle() + else: + con = get_ayon_server_api_connection() + settings_data = con.get_addons_settings( + only_values=False, + variant=cls._get_variant() + ) + addons = settings_data["versions"] + cache_item.update_value(addons) -def load_jsons_from_dir(path, *args, **kwargs): - """Load all .json files with content from entered folder path. - - Data are loaded recursively from a directory and recreate the - hierarchy as a dictionary. - - Entered path hierarchy: - |_ folder1 - | |_ data1.json - |_ folder2 - |_ subfolder1 - |_ data2.json - - Will result in: - ```javascript - { - "folder1": { - "data1": "CONTENT OF FILE" - }, - "folder2": { - "subfolder1": { - "data2": "CONTENT OF FILE" - } - } - } - ``` - - Args: - path (str): Path to the root folder where the json hierarchy starts. - - Returns: - dict: Loaded data. - """ - output = {} - - path = os.path.normpath(path) - if not os.path.exists(path): - # TODO warning - return output - - sub_keys = list(kwargs.pop("subkeys", args)) - for sub_key in tuple(sub_keys): - _path = os.path.join(path, sub_key) - if not os.path.exists(_path): - break - - path = _path - sub_keys.pop(0) - - base_len = len(path) + 1 - for base, _directories, filenames in os.walk(path): - base_items_str = base[base_len:] - if not base_items_str: - base_items = [] - else: - base_items = base_items_str.split(os.path.sep) - - for filename in filenames: - basename, ext = os.path.splitext(filename) - if ext == ".json": - full_path = os.path.join(base, filename) - value = load_json_file(full_path) - dict_keys = base_items + [basename] - output = subkey_merge(output, value, dict_keys) - - for sub_key in sub_keys: - output = output[sub_key] - return output - - -def subkey_merge(_dict, value, keys): - key = keys.pop(0) - if not keys: - _dict[key] = value - return _dict - - if key not in _dict: - _dict[key] = {} - _dict[key] = subkey_merge(_dict[key], value, keys) - - return _dict - - -def merge_overrides(source_dict, override_dict): - """Merge data from override_dict to source_dict.""" - - if M_OVERRIDDEN_KEY in override_dict: - overridden_keys = set(override_dict.pop(M_OVERRIDDEN_KEY)) - else: - overridden_keys = set() - - for key, value in override_dict.items(): - if (key in overridden_keys or key not in source_dict): - source_dict[key] = value - - elif isinstance(value, dict) and isinstance(source_dict[key], dict): - source_dict[key] = merge_overrides(source_dict[key], value) - - else: - source_dict[key] = value - return source_dict + return cache_item.get_value() def get_site_local_overrides(project_name, site_name, local_settings=None): """Site 
overrides from local settings for passet project and site name. + Deprecated: + This function is not implemented for AYON and will be removed. + Args: project_name (str): For which project are overrides. site_name (str): For which site are overrides needed. local_settings (dict): Preloaded local settings. They are loaded automatically if not passed. """ - # Check if local settings were passed - if local_settings is None: - local_settings = get_local_settings() - output = {} + return {} - # Skip if local settings are empty - if not local_settings: - return output - local_project_settings = local_settings.get("projects") or {} +def get_ayon_settings(project_name=None): + """AYON studio settings. - # Prepare overrides for entered project and for default project - project_locals = None - if project_name: - project_locals = local_project_settings.get(project_name) - default_project_locals = local_project_settings.get(DEFAULT_PROJECT_KEY) + Raw AYON settings values. - # First load and use local settings from default project - if default_project_locals and site_name in default_project_locals: - output.update(default_project_locals[site_name]) + Args: + project_name (Optional[str]): Project name. - # Apply project specific local settings if there are any - if project_locals and site_name in project_locals: - output.update(project_locals[site_name]) + Returns: + dict[str, Any]: AYON settings. + """ - return output + return _AyonSettingsCache.get_value_by_project(project_name) + + +def get_studio_settings(*args, **kwargs): + return _AyonSettingsCache.get_value_by_project(None) + + +def get_project_settings(project_name, *args, **kwargs): + return _AyonSettingsCache.get_value_by_project(project_name) + + +def get_general_environments(studio_settings=None): + """General studio environment variables. + + Args: + studio_settings (Optional[dict]): Pre-queried studio settings. + + Returns: + dict[str, Any]: General studio environment variables. + + """ + if studio_settings is None: + studio_settings = get_ayon_settings() + return json.loads(studio_settings["core"]["environments"]) + + +def get_project_environments(project_name, project_settings=None): + """Project environment variables. + + Args: + project_name (str): Project name. + project_settings (Optional[dict]): Pre-queried project settings. + + Returns: + dict[str, Any]: Project environment variables. + + """ + if project_settings is None: + project_settings = get_project_settings(project_name) + return json.loads( + project_settings["core"]["project_environments"] + ) def get_current_project_settings(): """Project settings for current context project. - Project name should be stored in environment variable `AVALON_PROJECT`. + Project name should be stored in environment variable `AYON_PROJECT_NAME`. This function should be used only in host context where environment variable must be set and should not happen that any part of process will change the value of the enviornment variable. """ - project_name = os.environ.get("AVALON_PROJECT") + project_name = os.environ.get("AYON_PROJECT_NAME") if not project_name: raise ValueError( - "Missing context project in environemt variable `AVALON_PROJECT`." + "Missing context project in environemt variable `AYON_PROJECT_NAME`." 
) return get_project_settings(project_name) -def get_global_settings(): - default_settings = load_openpype_default_settings() - return default_settings["system_settings"]["general"] - -def get_general_environments(): - value = get_system_settings() - return value["general"]["environment"] - - -def get_system_settings(*args, **kwargs): - default_settings = get_default_settings()[SYSTEM_SETTINGS_KEY] - return get_ayon_system_settings(default_settings) - - -def get_project_settings(project_name, *args, **kwargs): - default_settings = get_default_settings()[PROJECT_SETTINGS_KEY] - return get_ayon_project_settings(default_settings, project_name) diff --git a/client/ayon_core/style/__init__.py b/client/ayon_core/style/__init__.py index 8b2dfa1bcb..8d3089ef86 100644 --- a/client/ayon_core/style/__init__.py +++ b/client/ayon_core/style/__init__.py @@ -198,7 +198,7 @@ def _load_font(): def load_stylesheet(): - """Load and return OpenPype Qt stylesheet.""" + """Load and return AYON Qt stylesheet.""" if _Cache.stylesheet is None: _Cache.stylesheet = _load_stylesheet() @@ -207,7 +207,7 @@ def load_stylesheet(): def get_app_icon_path(): - """Path to OpenPype icon.""" + """Path to AYON icon.""" return resources.get_ayon_icon_filepath() diff --git a/client/ayon_core/style/style.css b/client/ayon_core/style/style.css index f6ecebd683..fcc76b0bff 100644 --- a/client/ayon_core/style/style.css +++ b/client/ayon_core/style/style.css @@ -845,7 +845,7 @@ OverlayMessageWidget QWidget { padding: 1px 5px; } -#CreatorFamilyLabel { +#CreatorProductTypeLabel { font-size: 10pt; font-weight: bold; } @@ -875,7 +875,7 @@ OverlayMessageWidget QWidget { border-radius: 0px; } -#SubsetView::item, #RepresentationView:item { +#ProductView::item, #RepresentationView:item { padding: 5px 1px; border: 0px; } @@ -1025,7 +1025,7 @@ PixmapButton:disabled { background: {color:bg-view-selection}; } -#ListViewSubsetName[state="invalid"] { +#ListViewProductName[state="invalid"] { color: {color:publisher:error}; } @@ -1138,17 +1138,17 @@ ValidationArtistMessage QLabel { font-size: 13pt; } -#AssetNameInputWidget { +#FolderPathInputWidget { background: {color:bg-inputs}; border: 1px solid {color:border}; border-radius: 0.2em; } -#AssetNameInputWidget QWidget { +#FolderPathInputWidget QWidget { background: transparent; } -#AssetNameInputButton { +#FolderPathInputButton { border-bottom-left-radius: 0px; border-top-left-radius: 0px; padding: 0px; @@ -1159,23 +1159,23 @@ ValidationArtistMessage QLabel { border-bottom: none; } -#AssetNameInput { +#FolderPathInput { border-bottom-right-radius: 0px; border-top-right-radius: 0px; border: none; } -#AssetNameInputWidget:hover { +#FolderPathInputWidget:hover { border-color: {color:border-hover}; } -#AssetNameInputWidget:focus{ +#FolderPathInputWidget:focus{ border-color: {color:border-focus}; } -#AssetNameInputWidget:disabled { +#FolderPathInputWidget:disabled { background: {color:bg-inputs-disabled}; } -#TasksCombobox[state="invalid"], #AssetNameInputWidget[state="invalid"], #AssetNameInputButton[state="invalid"] { +#TasksCombobox[state="invalid"], #FolderPathInputWidget[state="invalid"], #FolderPathInputButton[state="invalid"] { border-color: {color:publisher:error}; } @@ -1192,7 +1192,7 @@ ValidationArtistMessage QLabel { #PublishCommentInput { padding: 0.2em; } -#FamilyIconLabel { +#ProductTypeIconLabel { font-size: 14pt; } #ArrowBtn, #ArrowBtn:disabled, #ArrowBtn:hover { diff --git a/client/ayon_core/tests/README.md b/client/ayon_core/tests/README.md deleted file mode 100644 index 
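
With the legacy loaders removed, every settings call above is routed through _AyonSettingsCache and the AYON server. A minimal usage sketch of the new helpers defined in settings/lib.py; it assumes a configured server connection (and AYON_BUNDLE_NAME when bundles are used), and "my_project" is a placeholder project name:

import os

from ayon_core.settings.lib import (
    get_studio_settings,
    get_project_settings,
    get_general_environments,
    get_project_environments,
    get_current_project_settings,
)

# Studio and project settings share the same short-lived cache, so repeated
# calls within the CacheItem lifetime do not hit the server again.
studio_settings = get_studio_settings()
project_settings = get_project_settings("my_project")

# "core" environments are stored as JSON strings and decoded by the helpers.
studio_env = get_general_environments(studio_settings)
project_env = get_project_environments("my_project", project_settings)

# The current-context variant reads the project from AYON_PROJECT_NAME.
os.environ["AYON_PROJECT_NAME"] = "my_project"
current_settings = get_current_project_settings()
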
c05166767c..0000000000 --- a/client/ayon_core/tests/README.md +++ /dev/null @@ -1,4 +0,0 @@ -Tests for Pype --------------- -Trigger by: - `pype test --pype` \ No newline at end of file diff --git a/client/ayon_core/tests/__init__.py b/client/ayon_core/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/client/ayon_core/tests/lib.py b/client/ayon_core/tests/lib.py deleted file mode 100644 index c7d4423aba..0000000000 --- a/client/ayon_core/tests/lib.py +++ /dev/null @@ -1,88 +0,0 @@ -import os -import sys -import shutil -import tempfile -import contextlib - -import pyblish -import pyblish.plugin -from pyblish.vendor import six - - -# Setup -HOST = 'python' -FAMILY = 'test.family' - -REGISTERED = pyblish.plugin.registered_paths() -PACKAGEPATH = pyblish.lib.main_package_path() -ENVIRONMENT = os.environ.get("PYBLISHPLUGINPATH", "") -PLUGINPATH = os.path.join(PACKAGEPATH, '..', 'tests', 'plugins') - - -def setup(): - pyblish.plugin.deregister_all_paths() - - -def setup_empty(): - """Disable all plug-ins""" - setup() - pyblish.plugin.deregister_all_plugins() - pyblish.plugin.deregister_all_paths() - pyblish.plugin.deregister_all_hosts() - pyblish.plugin.deregister_all_callbacks() - pyblish.plugin.deregister_all_targets() - pyblish.api.deregister_all_discovery_filters() - - -def teardown(): - """Restore previously REGISTERED paths""" - - pyblish.plugin.deregister_all_paths() - for path in REGISTERED: - pyblish.plugin.register_plugin_path(path) - - os.environ["PYBLISHPLUGINPATH"] = ENVIRONMENT - pyblish.api.deregister_all_plugins() - pyblish.api.deregister_all_hosts() - pyblish.api.deregister_all_discovery_filters() - pyblish.api.deregister_test() - pyblish.api.__init__() - - -@contextlib.contextmanager -def captured_stdout(): - """Temporarily reassign stdout to a local variable""" - try: - sys.stdout = six.StringIO() - yield sys.stdout - finally: - sys.stdout = sys.__stdout__ - - -@contextlib.contextmanager -def captured_stderr(): - """Temporarily reassign stderr to a local variable""" - try: - sys.stderr = six.StringIO() - yield sys.stderr - finally: - sys.stderr = sys.__stderr__ - - -@contextlib.contextmanager -def tempdir(): - """Provide path to temporary directory""" - try: - tempdir = tempfile.mkdtemp() - yield tempdir - finally: - shutil.rmtree(tempdir) - - -def is_in_tests(): - """Returns if process is running in automatic tests mode. - - In tests mode different source DB is used, some plugins might be disabled - etc. - """ - return os.environ.get("IS_TEST") == '1' diff --git a/client/ayon_core/tests/mongo_performance.py b/client/ayon_core/tests/mongo_performance.py deleted file mode 100644 index 2df3363f4b..0000000000 --- a/client/ayon_core/tests/mongo_performance.py +++ /dev/null @@ -1,288 +0,0 @@ -import pymongo -import bson -import random -from datetime import datetime -import os - - -class TestPerformance(): - ''' - Class for testing performance of representation and their 'files' - parts. - Discussion is if embedded array: - 'files' : [ {'_id': '1111', 'path':'....}, - {'_id'...}] - OR documents: - 'files' : { - '1111': {'path':'....'}, - '2222': {'path':'...'} - } - is faster. 
- - Current results: - without additional partial index documents is 3x faster - With index is array 50x faster then document - - Partial index something like: - db.getCollection('performance_test').createIndex - ({'files._id': 1}, - {partialFilterExpresion: {'files': {'$exists': true}}}) - !DIDNT work for me, had to create manually in Compass - - ''' - - MONGO_URL = 'mongodb://localhost:27017' - MONGO_DB = 'performance_test' - MONGO_COLLECTION = 'performance_test' - - MAX_FILE_SIZE_B = 5000 - MAX_NUMBER_OF_SITES = 50 - ROOT_DIR = "C:/projects" - - inserted_ids = [] - - def __init__(self, version='array'): - ''' - It creates and fills collection, based on value of 'version'. - - :param version: 'array' - files as embedded array, - 'doc' - as document - ''' - self.client = pymongo.MongoClient(self.MONGO_URL) - self.db = self.client[self.MONGO_DB] - self.collection_name = self.MONGO_COLLECTION - - self.version = version - - if self.version != 'array': - self.collection_name = self.MONGO_COLLECTION + '_doc' - - self.collection = self.db[self.collection_name] - - self.ids = [] # for testing - self.inserted_ids = [] - - def prepare(self, no_of_records=100000, create_files=False): - ''' - Produce 'no_of_records' of representations with 'files' segment. - It depends on 'version' value in constructor, 'arrray' or 'doc' - :return: - ''' - print('Purging {} collection'.format(self.collection_name)) - self.collection.delete_many({}) - - id = bson.objectid.ObjectId() - - insert_recs = [] - for i in range(no_of_records): - file_id = bson.objectid.ObjectId() - file_id2 = bson.objectid.ObjectId() - file_id3 = bson.objectid.ObjectId() - - self.inserted_ids.extend([file_id, file_id2, file_id3]) - version_str = "v{:03d}".format(i + 1) - file_name = "test_Cylinder_workfileLookdev_{}.mb".\ - format(version_str) - - document = {"files": self.get_files(self.version, i + 1, - file_id, file_id2, file_id3, - create_files) - , - "context": { - "subset": "workfileLookdev", - "username": "petrk", - "task": "lookdev", - "family": "workfile", - "hierarchy": "Assets", - "project": {"code": "test", "name": "Test"}, - "version": i + 1, - "asset": "Cylinder", - "representation": "mb", - "root": self.ROOT_DIR - }, - "dependencies": [], - "name": "mb", - "parent": {"oid": '{}'.format(id)}, - "data": { - "path": "C:\\projects\\test_performance\\Assets\\Cylinder\\publish\\workfile\\workfileLookdev\\{}\\{}".format(version_str, file_name), # noqa: E501 - "template": "{root[work]}\\{project[name]}\\{hierarchy}\\{asset}\\publish\\{family}\\{subset}\\v{version:0>3}\\{project[code]}_{asset}_{subset}_v{version:0>3}<_{output}><.{frame:0>4}>.{representation}" # noqa: E501 - }, - "type": "representation", - "schema": "openpype:representation-2.0" - } - - insert_recs.append(document) - - print('Prepared {} records in {} collection'. - format(no_of_records, self.collection_name)) - - self.collection.insert_many(insert_recs) - # TODO refactore to produce real array and not needeing ugly regex - self.collection.insert_one({"inserted_id": self.inserted_ids}) - print('-' * 50) - - def run(self, queries=1000, loops=3): - ''' - Run X'queries' that are searching collection Y'loops' times - :param queries: how many times do ..find(...) - :param loops: loop of testing X queries - :return: None - ''' - print('Testing version {} on {}'.format(self.version, - self.collection_name)) - print('Queries rung {} in {} loops'.format(queries, loops)) - - inserted_ids = list(self.collection. 
- find({"inserted_id": {"$exists": True}})) - import re - self.ids = re.findall("'[0-9a-z]*'", str(inserted_ids)) - - import time - - found_cnt = 0 - for _ in range(loops): - print('Starting loop {}'.format(_)) - start = time.time() - for _ in range(queries): - # val = random.choice(self.ids) - # val = val.replace("'", '') - val = random.randint(0, 50) - print(val) - - if (self.version == 'array'): - # prepared for partial index, without 'files': exists - # wont engage - found = self.collection.\ - find({'files': {"$exists": True}, - 'files.sites.name': "local_{}".format(val)}).\ - count() - else: - key = "files.{}".format(val) - found = self.collection.find_one({key: {"$exists": True}}) - print("found {} records".format(found)) - # if found: - # found_cnt += len(list(found)) - - end = time.time() - print('duration per loop {}'.format(end - start)) - print("found_cnt {}".format(found_cnt)) - - def get_files(self, mode, i, file_id, file_id2, file_id3, - create_files=False): - ''' - Wrapper to decide if 'array' or document version should be used - :param mode: 'array'|'doc' - :param i: step number - :param file_id: ObjectId of first dummy file - :param file_id2: .. - :param file_id3: .. - :return: - ''' - if mode == 'array': - return self.get_files_array(i, file_id, file_id2, file_id3, - create_files) - else: - return self.get_files_doc(i, file_id, file_id2, file_id3) - - def get_files_array(self, i, file_id, file_id2, file_id3, - create_files=False): - ret = [ - { - "path": "{root[work]}" + "{root[work]}/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_A_workfileLookdev_v{:03d}.dat".format(i, i), # noqa: E501 - "_id": '{}'.format(file_id), - "hash": "temphash", - "sites": self.get_sites(self.MAX_NUMBER_OF_SITES), - "size": random.randint(0, self.MAX_FILE_SIZE_B) - }, - { - "path": "{root[work]}" + "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_B_workfileLookdev_v{:03d}.dat".format(i, i), # noqa: E501 - "_id": '{}'.format(file_id2), - "hash": "temphash", - "sites": self.get_sites(self.MAX_NUMBER_OF_SITES), - "size": random.randint(0, self.MAX_FILE_SIZE_B) - }, - { - "path": "{root[work]}" + "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_C_workfileLookdev_v{:03d}.dat".format(i, i), # noqa: E501 - "_id": '{}'.format(file_id3), - "hash": "temphash", - "sites": self.get_sites(self.MAX_NUMBER_OF_SITES), - "size": random.randint(0, self.MAX_FILE_SIZE_B) - } - - ] - if create_files: - for f in ret: - path = f.get("path").replace("{root[work]}", self.ROOT_DIR) - os.makedirs(os.path.dirname(path), exist_ok=True) - with open(path, 'wb') as fp: - fp.write(os.urandom(f.get("size"))) - - return ret - - def get_files_doc(self, i, file_id, file_id2, file_id3): - ret = {} - ret['{}'.format(file_id)] = { - "path": "{root[work]}" + - "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/" # noqa: E501 - "v{:03d}/test_CylinderA_workfileLookdev_v{:03d}.mb".format(i, i), # noqa: E501 - "hash": "temphash", - "sites": ["studio"], - "size": 87236 - } - - ret['{}'.format(file_id2)] = { - "path": "{root[work]}" + - "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/" # noqa: E501 - "v{:03d}/test_CylinderB_workfileLookdev_v{:03d}.mb".format(i, i), # noqa: E501 - "hash": "temphash", - "sites": ["studio"], - "size": 87236 - } - ret['{}'.format(file_id3)] = { - "path": "{root[work]}" + - "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/" # noqa: 
E501 - "v{:03d}/test_CylinderC_workfileLookdev_v{:03d}.mb".format(i, i), # noqa: E501 - "hash": "temphash", - "sites": ["studio"], - "size": 87236 - } - - return ret - - def get_sites(self, number_of_sites=50): - """ - Return array of sites declaration. - Currently on 1st site has "created_dt" fillled, which should - trigger upload to 'gdrive' site. - 'gdrive' site is appended, its destination for syncing for - Sync Server - Args: - number_of_sites: - - Returns: - - """ - sites = [] - for i in range(number_of_sites): - site = {'name': "local_{}".format(i)} - # do not create null 'created_dt' field, Mongo doesnt like it - if i == 0: - site['created_dt'] = datetime.now() - - sites.append(site) - - sites.append({'name': "gdrive"}) - - return sites - - -if __name__ == '__main__': - tp = TestPerformance('array') - tp.prepare(no_of_records=10000, create_files=True) - # tp.run(10, 3) - - # print('-'*50) - # - # tp = TestPerformance('doc') - # tp.prepare() # enable to prepare data - # tp.run(1000, 3) diff --git a/client/ayon_core/tests/test_avalon_plugin_presets.py b/client/ayon_core/tests/test_avalon_plugin_presets.py deleted file mode 100644 index 4926286ca3..0000000000 --- a/client/ayon_core/tests/test_avalon_plugin_presets.py +++ /dev/null @@ -1,43 +0,0 @@ -from ayon_core.pipeline import ( - install_host, - LegacyCreator, - register_creator_plugin, - discover_creator_plugins, -) - - -class MyTestCreator(LegacyCreator): - - my_test_property = "A" - - def __init__(self, name, asset, options=None, data=None): - super(MyTestCreator, self).__init__(self, name, asset, - options=None, data=None) - - -# this is hack like no other - we need to inject our own avalon host -# and bypass all its validation. Avalon hosts are modules that needs -# `ls` callable as attribute. 
Voila: -class Test: - __name__ = "test" - ls = len - - @staticmethod - def install(): - register_creator_plugin(MyTestCreator) - - -def test_avalon_plugin_presets(monkeypatch, printer): - install_host(Test) - - plugins = discover_creator_plugins() - printer("Test if we got our test plugin") - assert MyTestCreator in plugins - for p in plugins: - if p.__name__ == "MyTestCreator": - printer("Test if we have overridden existing property") - assert p.my_test_property == "B" - printer("Test if we have overridden superclass property") - assert p.active is False - printer("Test if we have added new property") - assert p.new_property == "new" diff --git a/client/ayon_core/tests/test_lib_restructuralization.py b/client/ayon_core/tests/test_lib_restructuralization.py deleted file mode 100644 index ffbd62b045..0000000000 --- a/client/ayon_core/tests/test_lib_restructuralization.py +++ /dev/null @@ -1,25 +0,0 @@ -# Test for backward compatibility of restructure of lib.py into lib library -# Contains simple imports that should still work - - -def test_backward_compatibility(printer): - printer("Test if imports still work") - try: - from ayon_core.lib import execute_hook - from ayon_core.lib import PypeHook - - from ayon_core.lib import ApplicationLaunchFailed - - from ayon_core.lib import get_ffmpeg_tool_path - from ayon_core.lib import get_last_version_from_path - from ayon_core.lib import get_paths_from_environ - from ayon_core.lib import get_version_from_path - from ayon_core.lib import version_up - - from ayon_core.lib import get_ffprobe_streams - - from ayon_core.lib import source_hash - from ayon_core.lib import run_subprocess - - except ImportError as e: - raise diff --git a/client/ayon_core/tests/test_pyblish_filter.py b/client/ayon_core/tests/test_pyblish_filter.py deleted file mode 100644 index bc20f863c9..0000000000 --- a/client/ayon_core/tests/test_pyblish_filter.py +++ /dev/null @@ -1,60 +0,0 @@ -import os -import pyblish.api -import pyblish.util -import pyblish.plugin -from ayon_core.pipeline.publish.lib import filter_pyblish_plugins -from . import lib - - -def test_pyblish_plugin_filter_modifier(printer, monkeypatch): - """ - Test if pyblish filter can filter and modify plugins on-the-fly. 
- """ - - lib.setup_empty() - monkeypatch.setitem(os.environ, 'PYBLISHPLUGINPATH', '') - plugins = pyblish.api.registered_plugins() - printer("Test if we have no registered plugins") - assert len(plugins) == 0 - paths = pyblish.api.registered_paths() - printer("Test if we have no registered plugin paths") - assert len(paths) == 0 - - class MyTestPlugin(pyblish.api.InstancePlugin): - my_test_property = 1 - label = "Collect Renderable Camera(s)" - hosts = ["test"] - families = ["default"] - - pyblish.api.register_host("test") - pyblish.api.register_plugin(MyTestPlugin) - pyblish.api.register_discovery_filter(filter_pyblish_plugins) - plugins = pyblish.api.discover() - - printer("Test if only one plugin was discovered") - assert len(plugins) == 1 - printer("Test if properties are modified correctly") - assert plugins[0].label == "loaded from preset" - assert plugins[0].families == ["changed", "by", "preset"] - assert plugins[0].optional is True - - lib.teardown() - - -def test_pyblish_plugin_filter_removal(monkeypatch): - """ Test that plugin can be removed by filter """ - lib.setup_empty() - monkeypatch.setitem(os.environ, 'PYBLISHPLUGINPATH', '') - plugins = pyblish.api.registered_plugins() - - class MyTestRemovedPlugin(pyblish.api.InstancePlugin): - my_test_property = 1 - label = "Collect Renderable Camera(s)" - hosts = ["test"] - families = ["default"] - - pyblish.api.register_host("test") - pyblish.api.register_plugin(MyTestRemovedPlugin) - pyblish.api.register_discovery_filter(filter_pyblish_plugins) - plugins = pyblish.api.discover() - assert len(plugins) == 0 diff --git a/client/ayon_core/tools/adobe_webserver/app.py b/client/ayon_core/tools/adobe_webserver/app.py index 893c076020..b10509f484 100644 --- a/client/ayon_core/tools/adobe_webserver/app.py +++ b/client/ayon_core/tools/adobe_webserver/app.py @@ -82,7 +82,7 @@ class WebServerTool: context = get_global_context() project = context["project_name"] - asset = context["asset_name"] + asset = context["folder_path"] task = context["task_name"] log.info("Sending context change to {}-{}-{}".format(project, asset, diff --git a/client/ayon_core/tools/ayon_utils/models/hierarchy.py b/client/ayon_core/tools/ayon_utils/models/hierarchy.py index 07773dfb78..10495cf10b 100644 --- a/client/ayon_core/tools/ayon_utils/models/hierarchy.py +++ b/client/ayon_core/tools/ayon_utils/models/hierarchy.py @@ -191,12 +191,12 @@ def _get_folder_item_from_hierarchy_item(item): name = item["name"] path_parts = list(item["parents"]) path_parts.append(name) - + path = "/" + "/".join(path_parts) return FolderItem( item["id"], item["parentId"], name, - "/".join(path_parts), + path, item["folderType"], item["label"], None, @@ -307,8 +307,44 @@ class HierarchyModel(object): }) return output + def get_folder_items_by_paths(self, project_name, folder_paths): + """Get folder items by ids. + + This function will query folders if they are not in cache. But the + queried items are not added to cache back. + + Args: + project_name (str): Name of project where to look for folders. + folder_paths (Iterable[str]): Folder paths. + + Returns: + dict[str, Union[FolderItem, None]]: Folder items by id. 
+ """ + + folder_paths = set(folder_paths) + output = {folder_path: None for folder_path in folder_paths} + if not folder_paths: + return output + + if self._folders_items[project_name].is_valid: + cache_data = self._folders_items[project_name].get_data() + for folder_item in cache_data.values(): + if folder_item.path in folder_paths: + output[folder_item.path] = folder_item + return output + folders = ayon_api.get_folders( + project_name, + folder_paths=folder_paths, + fields=["id", "name", "label", "parentId", "path", "folderType"] + ) + # Make sure all folder ids are in output + for folder in folders: + item = _get_folder_item_from_entity(folder) + output[item.path] = item + return output + def get_folder_item(self, project_name, folder_id): - """Get folder items by id. + """Get folder item by id. This function will query folder if they is not in cache. But the queried items are not added to cache back. @@ -325,6 +361,25 @@ class HierarchyModel(object): ) return items.get(folder_id) + def get_folder_item_by_path(self, project_name, folder_path): + """Get folder item by path. + + This function will query folder if they is not in cache. But the + queried items are not added to cache back. + + Args: + project_name (str): Name of project where to look for folders. + folder_path (str): Folder path. + + Returns: + Union[FolderItem, None]: Folder item. + + """ + items = self.get_folder_items_by_paths( + project_name, [folder_path] + ) + return items.get(folder_path) + def get_task_items(self, project_name, folder_id, sender): if not project_name or not folder_id: return [] diff --git a/client/ayon_core/tools/ayon_utils/widgets/__init__.py b/client/ayon_core/tools/ayon_utils/widgets/__init__.py index f58de17c4a..a62bab6751 100644 --- a/client/ayon_core/tools/ayon_utils/widgets/__init__.py +++ b/client/ayon_core/tools/ayon_utils/widgets/__init__.py @@ -3,12 +3,17 @@ from .projects_widget import ( ProjectsCombobox, ProjectsQtModel, ProjectSortFilterProxy, + PROJECT_NAME_ROLE, + PROJECT_IS_CURRENT_ROLE, + PROJECT_IS_ACTIVE_ROLE, + PROJECT_IS_LIBRARY_ROLE, ) from .folders_widget import ( FoldersWidget, FoldersQtModel, FOLDERS_MODEL_SENDER_NAME, + SimpleFoldersWidget, ) from .tasks_widget import ( @@ -27,10 +32,15 @@ __all__ = ( "ProjectsCombobox", "ProjectsQtModel", "ProjectSortFilterProxy", + "PROJECT_NAME_ROLE", + "PROJECT_IS_CURRENT_ROLE", + "PROJECT_IS_ACTIVE_ROLE", + "PROJECT_IS_LIBRARY_ROLE", "FoldersWidget", "FoldersQtModel", "FOLDERS_MODEL_SENDER_NAME", + "SimpleFoldersWidget", "TasksWidget", "TasksQtModel", diff --git a/client/ayon_core/tools/ayon_utils/widgets/folders_widget.py b/client/ayon_core/tools/ayon_utils/widgets/folders_widget.py index 1e395b0368..e42a5b635c 100644 --- a/client/ayon_core/tools/ayon_utils/widgets/folders_widget.py +++ b/client/ayon_core/tools/ayon_utils/widgets/folders_widget.py @@ -2,6 +2,11 @@ import collections from qtpy import QtWidgets, QtGui, QtCore +from ayon_core.lib.events import QueuedEventSystem +from ayon_core.tools.ayon_utils.models import ( + HierarchyModel, + HierarchyExpectedSelection, +) from ayon_core.tools.utils import ( RecursiveSortFilterProxyModel, TreeView, @@ -86,6 +91,21 @@ class FoldersQtModel(QtGui.QStandardItemModel): return QtCore.QModelIndex() return self.indexFromItem(item) + def get_item_id_by_path(self, folder_path): + """Get folder id by path. + + Args: + folder_path (str): Folder path. + + Returns: + Union[str, None]: Folder id or None if folder is not available. 
+
+        """
+        for folder_id, item in self._items_by_id.items():
+            if item.data(FOLDER_PATH_ROLE) == folder_path:
+                return folder_id
+        return None
+
     def get_project_name(self):
         """Project name which model currently use.
 
@@ -390,6 +410,15 @@ class FoldersWidget(QtWidgets.QWidget):
 
         return self._get_selected_item_id()
 
+    def get_selected_folder_path(self):
+        """Get selected folder path.
+
+        Returns:
+            Union[str, None]: Folder path which is selected.
+        """
+
+        return self._get_selected_item_value(FOLDER_PATH_ROLE)
+
     def get_selected_folder_label(self):
         """Selected folder label.
 
@@ -417,8 +446,10 @@ class FoldersWidget(QtWidgets.QWidget):
         Args:
             folder_id (Union[str, None]): Folder id or None to deselect.
 
-        """
+        Returns:
+            bool: Requested folder was selected.
+        """
         if folder_id is None:
             self._folders_view.clearSelection()
             return True
@@ -439,6 +470,25 @@ class FoldersWidget(QtWidgets.QWidget):
         )
         return True
 
+    def set_selected_folder_path(self, folder_path):
+        """Set selected folder by path.
+
+        Args:
+            folder_path (str): Folder path.
+
+        Returns:
+            bool: Requested folder was selected.
+
+        """
+        if folder_path is None:
+            self._folders_view.clearSelection()
+            return True
+
+        folder_id = self._folders_model.get_item_id_by_path(folder_path)
+        if folder_id is None:
+            return False
+        return self.set_selected_folder(folder_id)
+
     def set_deselectable(self, enabled):
         """Set deselectable mode.
 
@@ -473,9 +523,12 @@ class FoldersWidget(QtWidgets.QWidget):
         self.refreshed.emit()
 
     def _get_selected_item_id(self):
+        return self._get_selected_item_value(FOLDER_ID_ROLE)
+
+    def _get_selected_item_value(self, role):
         selection_model = self._folders_view.selectionModel()
         for index in selection_model.selectedIndexes():
-            item_id = index.data(FOLDER_ID_ROLE)
+            item_id = index.data(role)
             if item_id is not None:
                 return item_id
         return None
@@ -514,3 +567,110 @@ class FoldersWidget(QtWidgets.QWidget):
             if folder_id is not None:
                 self.set_selected_folder(folder_id)
             self._controller.expected_folder_selected(folder_id)
+
+
+class SimpleSelectionModel(object):
+    """Model handling selection changes.
+
+    Triggering events:
+    - "selection.project.changed"
+    - "selection.folder.changed"
+    """
+
+    event_source = "selection.model"
+
+    def __init__(self, controller):
+        self._controller = controller
+
+        self._project_name = None
+        self._folder_id = None
+        self._task_id = None
+        self._task_name = None
+
+    def get_selected_project_name(self):
+        return self._project_name
+
+    def set_selected_project(self, project_name):
+        self._project_name = project_name
+        self._controller.emit_event(
+            "selection.project.changed",
+            {"project_name": project_name},
+            self.event_source
+        )
+
+    def get_selected_folder_id(self):
+        return self._folder_id
+
+    def set_selected_folder(self, folder_id):
+        if folder_id == self._folder_id:
+            return
+        self._folder_id = folder_id
+        self._controller.emit_event(
+            "selection.folder.changed",
+            {
+                "project_name": self._project_name,
+                "folder_id": folder_id,
+            },
+            self.event_source
+        )
+
+
+class SimpleFoldersController(object):
+    def __init__(self):
+        self._event_system = self._create_event_system()
+        self._hierarchy_model = HierarchyModel(self)
+        self._selection_model = SimpleSelectionModel(self)
+        self._expected_selection = HierarchyExpectedSelection(
+            self, handle_project=False, handle_folder=True, handle_task=False
+        )
+
+    def emit_event(self, topic, data=None, source=None):
+        """Use implemented event system to trigger event."""
+
+        if data is None:
+            data = {}
+        self._event_system.emit(topic, data, source)
+
+    def register_event_callback(self, topic, callback):
+        self._event_system.add_callback(topic, callback)
+
+    # Model functions
+    def get_folder_items(self, project_name, sender=None):
+        return self._hierarchy_model.get_folder_items(project_name, sender)
+
+    def set_selected_project(self, project_name):
+        self._selection_model.set_selected_project(project_name)
+
+    def set_selected_folder(self, folder_id):
+        self._selection_model.set_selected_folder(folder_id)
+
+    def get_expected_selection_data(self):
+        return self._expected_selection.get_expected_selection_data()
+
+    def expected_folder_selected(self, folder_id):
+        self._expected_selection.expected_folder_selected(folder_id)
+
+    def _create_event_system(self):
+        return QueuedEventSystem()
+
+
+class SimpleFoldersWidget(FoldersWidget):
+    def __init__(self, controller=None, *args, **kwargs):
+        if controller is None:
+            controller = SimpleFoldersController()
+        super(SimpleFoldersWidget, self).__init__(controller, *args, **kwargs)
+
+    def set_project_name(self, project_name):
+        self._controller.set_selected_project(project_name)
+        super(SimpleFoldersWidget, self).set_project_name(project_name)
+
+    def _on_project_selection_change(self, event):
+        """Ignore project selection change from controller.
+
+        The only place that can trigger a project change is this widget,
+        through 'set_project_name', which already handles the change.
+
+        Args:
+            event (Event): Triggered event.
+        """
+        pass
diff --git a/client/ayon_core/tools/ayon_utils/widgets/projects_widget.py b/client/ayon_core/tools/ayon_utils/widgets/projects_widget.py
index d3bebecfd6..79ffc77640 100644
--- a/client/ayon_core/tools/ayon_utils/widgets/projects_widget.py
+++ b/client/ayon_core/tools/ayon_utils/widgets/projects_widget.py
@@ -47,6 +47,22 @@ class ProjectsQtModel(QtGui.QStandardItemModel):
     def has_content(self):
         return len(self._project_items) > 0
 
+    def get_index_by_project_name(self, project_name):
+        """Get index of project by name.
+
+        Args:
+            project_name (str): Project name.
+
+        Returns:
+            QtCore.QModelIndex: Index of project item. Index is not valid
+                if project is not found.
+
+        """
+        item = self._project_items.get(project_name)
+        if item is None:
+            return QtCore.QModelIndex()
+        return self.indexFromItem(item)
+
     def set_select_item_visible(self, visible):
         if self._select_item_visible is visible:
             return
diff --git a/client/ayon_core/tools/ayon_utils/widgets/tasks_widget.py b/client/ayon_core/tools/ayon_utils/widgets/tasks_widget.py
index 3d6cc47fe3..b273d83fa6 100644
--- a/client/ayon_core/tools/ayon_utils/widgets/tasks_widget.py
+++ b/client/ayon_core/tools/ayon_utils/widgets/tasks_widget.py
@@ -214,6 +214,7 @@ class TasksQtModel(QtGui.QStandardItemModel):
             item.setData(task_item.label, QtCore.Qt.DisplayRole)
             item.setData(name, ITEM_NAME_ROLE)
             item.setData(task_item.id, ITEM_ID_ROLE)
+            item.setData(task_item.task_type, TASK_TYPE_ROLE)
             item.setData(task_item.parent_id, PARENT_ID_ROLE)
             item.setData(icon, QtCore.Qt.DecorationRole)
 
@@ -358,6 +359,78 @@ class TasksWidget(QtWidgets.QWidget):
 
         self._tasks_model.refresh()
 
+    def get_selected_task_info(self):
+        """Get selected task info.
+
+        Example output::
+
+            {
+                "task_id": "5e7e3e3e3e3e3e3e3e3e3e3e",
+                "task_name": "modeling",
+                "task_type": "Modeling"
+            }
+
+        Returns:
+            dict[str, Union[str, None]]: Task info.
+
+        """
+        _, task_id, task_name, task_type = self._get_selected_item_ids()
+        return {
+            "task_id": task_id,
+            "task_name": task_name,
+            "task_type": task_type,
+        }
+
+    def get_selected_task_name(self):
+        """Get selected task name.
+
+        Returns:
+            Union[str, None]: Task name.
+        """
+
+        _, _, task_name, _ = self._get_selected_item_ids()
+        return task_name
+
+    def get_selected_task_type(self):
+        """Get selected task type.
+
+        Returns:
+            Union[str, None]: Task type.
+
+        """
+        _, _, _, task_type = self._get_selected_item_ids()
+        return task_type
+
+    def set_selected_task(self, task_name):
+        """Set selected task by name.
+
+        Args:
+            task_name (str): Task name.
+
+        Returns:
+            bool: Task was selected.
+
+        """
+        if task_name is None:
+            self._tasks_view.clearSelection()
+            return True
+
+        if task_name == self.get_selected_task_name():
+            return True
+        index = self._tasks_model.get_index_by_name(task_name)
+        if not index.isValid():
+            return False
+
+        proxy_index = self._tasks_proxy_model.mapFromSource(index)
+        if not proxy_index.isValid():
+            return False
+
+        selection_model = self._tasks_view.selectionModel()
+        selection_model.setCurrentIndex(
+            proxy_index, QtCore.QItemSelectionModel.SelectCurrent
+        )
+        return True
+
     def _on_tasks_refresh_finished(self, event):
         """Tasks were refreshed in controller.
@@ -395,10 +468,11 @@ class TasksWidget(QtWidgets.QWidget): for index in selection_model.selectedIndexes(): task_id = index.data(ITEM_ID_ROLE) task_name = index.data(ITEM_NAME_ROLE) + task_type = index.data(TASK_TYPE_ROLE) parent_id = index.data(PARENT_ID_ROLE) if task_name is not None: - return parent_id, task_id, task_name - return self._selected_folder_id, None, None + return parent_id, task_id, task_name, task_type + return self._selected_folder_id, None, None, None def _on_selection_change(self): # Don't trigger task change during refresh @@ -407,7 +481,7 @@ class TasksWidget(QtWidgets.QWidget): if self._tasks_model.is_refreshing: return - parent_id, task_id, task_name = self._get_selected_item_ids() + parent_id, task_id, task_name, _ = self._get_selected_item_ids() self._controller.set_selected_task(task_id, task_name) self.selection_changed.emit() diff --git a/client/ayon_core/tools/creator/constants.py b/client/ayon_core/tools/creator/constants.py index 5c4bbdcca3..ec555fbe9c 100644 --- a/client/ayon_core/tools/creator/constants.py +++ b/client/ayon_core/tools/creator/constants.py @@ -1,7 +1,7 @@ from qtpy import QtCore -FAMILY_ROLE = QtCore.Qt.UserRole + 1 +PRODUCT_TYPE_ROLE = QtCore.Qt.UserRole + 1 ITEM_ID_ROLE = QtCore.Qt.UserRole + 2 SEPARATOR = "---" diff --git a/client/ayon_core/tools/creator/model.py b/client/ayon_core/tools/creator/model.py index 3650993b9e..bf6c7380a1 100644 --- a/client/ayon_core/tools/creator/model.py +++ b/client/ayon_core/tools/creator/model.py @@ -4,7 +4,7 @@ from qtpy import QtGui, QtCore from ayon_core.pipeline import discover_legacy_creator_plugins from . constants import ( - FAMILY_ROLE, + PRODUCT_TYPE_ROLE, ITEM_ID_ROLE ) @@ -28,15 +28,15 @@ class CreatorsModel(QtGui.QStandardItemModel): item_id = str(uuid.uuid4()) self._creators_by_id[item_id] = creator - label = creator.label or creator.family + label = creator.label or creator.product_type item = QtGui.QStandardItem(label) item.setEditable(False) item.setData(item_id, ITEM_ID_ROLE) - item.setData(creator.family, FAMILY_ROLE) + item.setData(creator.product_type, PRODUCT_TYPE_ROLE) items.append(item) if not items: - item = QtGui.QStandardItem("No registered families") + item = QtGui.QStandardItem("No registered create plugins") item.setEnabled(False) item.setData(False, QtCore.Qt.ItemIsEnabled) items.append(item) @@ -47,15 +47,15 @@ class CreatorsModel(QtGui.QStandardItemModel): def get_creator_by_id(self, item_id): return self._creators_by_id.get(item_id) - def get_indexes_by_family(self, family): + def get_indexes_by_product_type(self, product_type): indexes = [] for row in range(self.rowCount()): index = self.index(row, 0) item_id = index.data(ITEM_ID_ROLE) creator_plugin = self._creators_by_id.get(item_id) if creator_plugin and ( - creator_plugin.label.lower() == family.lower() - or creator_plugin.family.lower() == family.lower() + creator_plugin.label.lower() == product_type.lower() + or creator_plugin.product_type.lower() == product_type.lower() ): indexes.append(index) return indexes diff --git a/client/ayon_core/tools/creator/widgets.py b/client/ayon_core/tools/creator/widgets.py index 05b5469151..53a2ee1080 100644 --- a/client/ayon_core/tools/creator/widgets.py +++ b/client/ayon_core/tools/creator/widgets.py @@ -5,7 +5,7 @@ from qtpy import QtWidgets, QtCore, QtGui import qtawesome -from ayon_core.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS +from ayon_core.pipeline.create import PRODUCT_NAME_ALLOWED_SYMBOLS from ayon_core.tools.utils import ErrorMessageBox if hasattr(QtGui, 
"QRegularExpressionValidator"): @@ -19,16 +19,16 @@ else: class CreateErrorMessageBox(ErrorMessageBox): def __init__( self, - family, - subset_name, - asset_name, + product_type, + product_name, + folder_path, exc_msg, formatted_traceback, parent ): - self._family = family - self._subset_name = subset_name - self._asset_name = asset_name + self._product_type = product_type + self._product_name = product_name + self._folder_path = folder_path self._exc_msg = exc_msg self._formatted_traceback = formatted_traceback super(CreateErrorMessageBox, self).__init__("Creation failed", parent) @@ -42,14 +42,14 @@ class CreateErrorMessageBox(ErrorMessageBox): def _get_report_data(self): report_message = ( - "Failed to create Product: \"{subset}\"" - " Type: \"{family}\"" - " in Asset: \"{asset}\"" + "Failed to create Product: \"{product_name}\"" + " Type: \"{product_type}\"" + " in Folder: \"{folder_path}\"" "\n\nError: {message}" ).format( - subset=self._subset_name, - family=self._family, - asset=self._asset_name, + product_name=self._product_name, + product_type=self._product_type, + folder_path=self._folder_path, message=self._exc_msg ) if self._formatted_traceback: @@ -74,7 +74,7 @@ class CreateErrorMessageBox(ErrorMessageBox): item_name_widget = QtWidgets.QLabel(self) item_name_widget.setText( item_name_template.format( - self._family, self._subset_name, self._asset_name + self._product_type, self._product_name, self._folder_path ) ) content_layout.addWidget(item_name_widget) @@ -94,16 +94,16 @@ class CreateErrorMessageBox(ErrorMessageBox): content_layout.addWidget(tb_widget) -class SubsetNameValidator(RegularExpressionValidatorClass): +class ProductNameValidator(RegularExpressionValidatorClass): invalid = QtCore.Signal(set) - pattern = "^[{}]*$".format(SUBSET_NAME_ALLOWED_SYMBOLS) + pattern = "^[{}]*$".format(PRODUCT_NAME_ALLOWED_SYMBOLS) def __init__(self): reg = RegularExpressionClass(self.pattern) - super(SubsetNameValidator, self).__init__(reg) + super(ProductNameValidator, self).__init__(reg) def validate(self, text, pos): - results = super(SubsetNameValidator, self).validate(text, pos) + results = super(ProductNameValidator, self).validate(text, pos) if results[0] == self.Invalid: self.invalid.emit(self.invalid_chars(text)) return results @@ -131,7 +131,7 @@ class VariantLineEdit(QtWidgets.QLineEdit): def __init__(self, *args, **kwargs): super(VariantLineEdit, self).__init__(*args, **kwargs) - validator = SubsetNameValidator() + validator = ProductNameValidator() self.setValidator(validator) self.setToolTip("Only alphanumeric characters (A-Z a-z 0-9), " "'_' and '.' are allowed.") @@ -183,24 +183,24 @@ class VariantLineEdit(QtWidgets.QLineEdit): ) -class FamilyDescriptionWidget(QtWidgets.QWidget): - """A family description widget. +class ProductTypeDescriptionWidget(QtWidgets.QWidget): + """A product type description widget. - Shows a family icon, family name and a help description. + Shows a product type icon, name and a help description. Used in creator header. 
- _________________ - | ____ | - | |icon| FAMILY | - | |____| help | - |_________________| + _______________________ + | ____ | + | |icon| PRODUCT TYPE | + | |____| help | + |_______________________| """ SIZE = 35 def __init__(self, parent=None): - super(FamilyDescriptionWidget, self).__init__(parent=parent) + super(ProductTypeDescriptionWidget, self).__init__(parent=parent) icon_label = QtWidgets.QLabel(self) icon_label.setSizePolicy( @@ -215,14 +215,14 @@ class FamilyDescriptionWidget(QtWidgets.QWidget): label_layout = QtWidgets.QVBoxLayout() label_layout.setSpacing(0) - family_label = QtWidgets.QLabel(self) - family_label.setObjectName("CreatorFamilyLabel") - family_label.setAlignment(QtCore.Qt.AlignBottom | QtCore.Qt.AlignLeft) + product_type_label = QtWidgets.QLabel(self) + product_type_label.setObjectName("CreatorProductTypeLabel") + product_type_label.setAlignment(QtCore.Qt.AlignBottom | QtCore.Qt.AlignLeft) help_label = QtWidgets.QLabel(self) help_label.setAlignment(QtCore.Qt.AlignTop | QtCore.Qt.AlignLeft) - label_layout.addWidget(family_label) + label_layout.addWidget(product_type_label) label_layout.addWidget(help_label) layout = QtWidgets.QHBoxLayout(self) @@ -232,14 +232,15 @@ class FamilyDescriptionWidget(QtWidgets.QWidget): layout.addLayout(label_layout) self._help_label = help_label - self._family_label = family_label + self._product_type_label = product_type_label self._icon_label = icon_label def set_item(self, creator_plugin): - """Update elements to display information of a family item. + """Update elements to display information of a product type item. Args: - item (dict): A family item as registered with name, help and icon + creator_plugin (dict): A product type item as registered with + name, help and icon. Returns: None @@ -247,7 +248,7 @@ class FamilyDescriptionWidget(QtWidgets.QWidget): """ if not creator_plugin: self._icon_label.setPixmap(None) - self._family_label.setText("") + self._product_type_label.setText("") self._help_label.setText("") return @@ -268,5 +269,5 @@ class FamilyDescriptionWidget(QtWidgets.QWidget): creator_help = docstring.splitlines()[0] if docstring else "" self._icon_label.setPixmap(pixmap) - self._family_label.setText(creator_plugin.family) + self._product_type_label.setText(creator_plugin.product_type) self._help_label.setText(creator_help) diff --git a/client/ayon_core/tools/creator/window.py b/client/ayon_core/tools/creator/window.py index 676e1c3959..7bf65ea510 100644 --- a/client/ayon_core/tools/creator/window.py +++ b/client/ayon_core/tools/creator/window.py @@ -14,7 +14,7 @@ from ayon_core.pipeline import ( get_current_task_name, ) from ayon_core.pipeline.create import ( - SUBSET_NAME_ALLOWED_SYMBOLS, + PRODUCT_NAME_ALLOWED_SYMBOLS, legacy_create, CreatorError, ) @@ -23,7 +23,7 @@ from .model import CreatorsModel from .widgets import ( CreateErrorMessageBox, VariantLineEdit, - FamilyDescriptionWidget + ProductTypeDescriptionWidget ) from .constants import ( ITEM_ID_ROLE, @@ -45,7 +45,7 @@ class CreatorWindow(QtWidgets.QDialog): self.windowFlags() | QtCore.Qt.WindowStaysOnTopHint ) - creator_info = FamilyDescriptionWidget(self) + creator_info = ProductTypeDescriptionWidget(self) creators_model = CreatorsModel() @@ -56,19 +56,19 @@ class CreatorWindow(QtWidgets.QDialog): creators_view.setObjectName("CreatorsView") creators_view.setModel(creators_proxy) - asset_name_input = QtWidgets.QLineEdit(self) + folder_path_input = QtWidgets.QLineEdit(self) variant_input = VariantLineEdit(self) - subset_name_input = QtWidgets.QLineEdit(self) - 
subset_name_input.setEnabled(False) + product_name_input = QtWidgets.QLineEdit(self) + product_name_input.setEnabled(False) - subset_button = QtWidgets.QPushButton() - subset_button.setFixedWidth(18) - subset_menu = QtWidgets.QMenu(subset_button) - subset_button.setMenu(subset_menu) + variants_btn = QtWidgets.QPushButton() + variants_btn.setFixedWidth(18) + variants_menu = QtWidgets.QMenu(variants_btn) + variants_btn.setMenu(variants_menu) name_layout = QtWidgets.QHBoxLayout() name_layout.addWidget(variant_input) - name_layout.addWidget(subset_button) + name_layout.addWidget(variants_btn) name_layout.setSpacing(3) name_layout.setContentsMargins(0, 0, 0, 0) @@ -76,13 +76,13 @@ class CreatorWindow(QtWidgets.QDialog): body_layout.setContentsMargins(0, 0, 0, 0) body_layout.addWidget(creator_info, 0) - body_layout.addWidget(QtWidgets.QLabel("Family", self), 0) + body_layout.addWidget(QtWidgets.QLabel("Product type", self), 0) body_layout.addWidget(creators_view, 1) - body_layout.addWidget(QtWidgets.QLabel("Asset", self), 0) - body_layout.addWidget(asset_name_input, 0) - body_layout.addWidget(QtWidgets.QLabel("Subset", self), 0) + body_layout.addWidget(QtWidgets.QLabel("Folder path", self), 0) + body_layout.addWidget(folder_path_input, 0) + body_layout.addWidget(QtWidgets.QLabel("Product name", self), 0) body_layout.addLayout(name_layout, 0) - body_layout.addWidget(subset_name_input, 0) + body_layout.addWidget(product_name_input, 0) useselection_chk = QtWidgets.QCheckBox("Use selection", self) useselection_chk.setCheckState(QtCore.Qt.Checked) @@ -116,7 +116,7 @@ class CreatorWindow(QtWidgets.QDialog): variant_input.returnPressed.connect(self._on_create) variant_input.textChanged.connect(self._on_data_changed) variant_input.report.connect(self.echo) - asset_name_input.textChanged.connect(self._on_data_changed) + folder_path_input.textChanged.connect(self._on_data_changed) creators_view.selectionModel().currentChanged.connect( self._on_selection_changed ) @@ -134,15 +134,15 @@ class CreatorWindow(QtWidgets.QDialog): self._create_btn = create_btn self._useselection_chk = useselection_chk self._variant_input = variant_input - self._subset_name_input = subset_name_input - self._asset_name_input = asset_name_input + self._product_name_input = product_name_input + self._folder_path_input = folder_path_input self._creators_model = creators_model self._creators_proxy = creators_proxy self._creators_view = creators_view - self._subset_btn = subset_button - self._subset_menu = subset_menu + self._variants_btn = variants_btn + self._variants_menu = variants_menu self._msg_label = msg_label @@ -160,7 +160,7 @@ class CreatorWindow(QtWidgets.QDialog): self._create_btn.setEnabled(valid) def _build_menu(self, default_names=None): - """Create optional predefined subset names + """Create optional predefined variants. 
Args: default_names(list): all predefined names @@ -171,8 +171,8 @@ class CreatorWindow(QtWidgets.QDialog): if not default_names: default_names = [] - menu = self._subset_menu - button = self._subset_btn + menu = self._variants_menu + button = self._variants_btn # Get and destroy the action group group = button.findChild(QtWidgets.QActionGroup) @@ -211,10 +211,10 @@ class CreatorWindow(QtWidgets.QDialog): item_id = index.data(ITEM_ID_ROLE) creator_plugin = self._creators_model.get_creator_by_id(item_id) user_input_text = self._variant_input.text() - asset_name = self._asset_name_input.text() + folder_path = self._folder_path_input.text() - # Early exit if no asset name - if not asset_name: + # Early exit if no folder path + if not folder_path: self._build_menu() self.echo("Asset name is required ..") self._set_valid_state(False) @@ -225,64 +225,63 @@ class CreatorWindow(QtWidgets.QDialog): if creator_plugin: # Get the asset from the database which match with the name asset_doc = get_asset_by_name( - project_name, asset_name, fields=["_id"] + project_name, folder_path, fields=["_id"] ) # Get plugin if not asset_doc or not creator_plugin: - subset_name = user_input_text self._build_menu() if not creator_plugin: - self.echo("No registered families ..") + self.echo("No registered product types ..") else: - self.echo("Asset '%s' not found .." % asset_name) + self.echo("Folder '{}' not found ..".format(folder_path)) self._set_valid_state(False) return - asset_id = asset_doc["_id"] + folder_id = asset_doc["_id"] task_name = get_current_task_name() - # Calculate subset name with Creator plugin - subset_name = creator_plugin.get_subset_name( - user_input_text, task_name, asset_id, project_name + # Calculate product name with Creator plugin + product_name = creator_plugin.get_product_name( + project_name, folder_id, task_name, user_input_text ) # Force replacement of prohibited symbols # QUESTION should Creator care about this and here should be only # validated with schema regex? 
- # Allow curly brackets in subset name for dynamic keys + # Allow curly brackets in product name for dynamic keys curly_left = "__cbl__" curly_right = "__cbr__" - tmp_subset_name = ( - subset_name + tmp_product_name = ( + product_name .replace("{", curly_left) .replace("}", curly_right) ) # Replace prohibited symbols - tmp_subset_name = re.sub( - "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS), + tmp_product_name = re.sub( + "[^{}]+".format(PRODUCT_NAME_ALLOWED_SYMBOLS), "", - tmp_subset_name + tmp_product_name ) - subset_name = ( - tmp_subset_name + product_name = ( + tmp_product_name .replace(curly_left, "{") .replace(curly_right, "}") ) - self._subset_name_input.setText(subset_name) + self._product_name_input.setText(product_name) - # Get all subsets of the current asset + # Get all products of the current folder subset_docs = get_subsets( - project_name, asset_ids=[asset_id], fields=["name"] + project_name, asset_ids=[folder_id], fields=["name"] ) - existing_subset_names = { + existing_product_names = { subset_doc["name"] for subset_doc in subset_docs } - existing_subset_names_low = set( + existing_product_names_low = set( _name.lower() - for _name in existing_subset_names + for _name in existing_product_names ) # Defaults to dropdown @@ -296,26 +295,26 @@ class CreatorWindow(QtWidgets.QDialog): # Replace compare_regex = re.compile(re.sub( - user_input_text, "(.+)", subset_name, flags=re.IGNORECASE + user_input_text, "(.+)", product_name, flags=re.IGNORECASE )) - subset_hints = set() + variant_hints = set() if user_input_text: - for _name in existing_subset_names: + for _name in existing_product_names: _result = compare_regex.search(_name) if _result: - subset_hints |= set(_result.groups()) + variant_hints |= set(_result.groups()) - if subset_hints: + if variant_hints: if defaults: defaults.append(SEPARATOR) - defaults.extend(subset_hints) + defaults.extend(variant_hints) self._build_menu(defaults) - # Indicate subset existence + # Indicate product existence if not user_input_text: self._variant_input.as_empty() - elif subset_name.lower() in existing_subset_names_low: - # validate existence of subset name with lowered text + elif product_name.lower() in existing_product_names_low: + # validate existence of product name with lowered text # - "renderMain" vs. 
"rensermain" mean same path item for # windows self._variant_input.as_exists() @@ -323,7 +322,7 @@ class CreatorWindow(QtWidgets.QDialog): self._variant_input.as_new() # Update the valid state - valid = subset_name.strip() != "" + valid = product_name.strip() != "" self._set_valid_state(valid) @@ -373,33 +372,37 @@ class CreatorWindow(QtWidgets.QDialog): self.setStyleSheet(style.load_stylesheet()) def refresh(self): - self._asset_name_input.setText(get_current_asset_name()) + self._folder_path_input.setText(get_current_asset_name()) self._creators_model.reset() - pype_project_setting = ( + product_types_smart_select = ( get_current_project_settings() ["global"] ["tools"] ["creator"] - ["families_smart_select"] + ["product_types_smart_select"] ) current_index = None - family = None + product_type = None task_name = get_current_task_name() or None lowered_task_name = task_name.lower() if task_name: - for _family, _task_names in pype_project_setting.items(): - _low_task_names = {name.lower() for name in _task_names} + for smart_item in product_types_smart_select: + _low_task_names = { + name.lower() for name in smart_item["task_names"] + } for _task_name in _low_task_names: if _task_name in lowered_task_name: - family = _family + product_type = smart_item["name"] break - if family: + if product_type: break - if family: - indexes = self._creators_model.get_indexes_by_family(family) + if product_type: + indexes = self._creators_model.get_indexes_by_product_type( + product_type + ) if indexes: index = indexes[0] current_index = self._creators_proxy.mapFromSource(index) @@ -420,8 +423,8 @@ class CreatorWindow(QtWidgets.QDialog): if creator_plugin is None: return - subset_name = self._subset_name_input.text() - asset_name = self._asset_name_input.text() + product_name = self._product_name_input.text() + folder_path = self._folder_path_input.text() use_selection = self._useselection_chk.isChecked() variant = self._variant_input.text() @@ -430,8 +433,8 @@ class CreatorWindow(QtWidgets.QDialog): try: legacy_create( creator_plugin, - subset_name, - asset_name, + product_name, + folder_path, options={"useSelection": use_selection}, data={"variant": variant} ) @@ -451,9 +454,9 @@ class CreatorWindow(QtWidgets.QDialog): if error_info: box = CreateErrorMessageBox( - creator_plugin.family, - subset_name, - asset_name, + creator_plugin.product_type, + product_name, + folder_path, *error_info, parent=self ) @@ -462,7 +465,7 @@ class CreatorWindow(QtWidgets.QDialog): self._message_dialog = box else: - self.echo("Created %s .." % subset_name) + self.echo("Created %s .." 
% product_name) def _on_msg_timer(self): self._msg_label.setText("") @@ -473,7 +476,7 @@ class CreatorWindow(QtWidgets.QDialog): def show(parent=None): - """Display asset creator GUI + """Display product creator GUI Arguments: debug (bool, optional): Run loader in debug-mode, diff --git a/client/ayon_core/tools/experimental_tools/tools_def.py b/client/ayon_core/tools/experimental_tools/tools_def.py index 568c7032d0..7def3551de 100644 --- a/client/ayon_core/tools/experimental_tools/tools_def.py +++ b/client/ayon_core/tools/experimental_tools/tools_def.py @@ -1,5 +1,4 @@ import os -from ayon_core.settings import get_local_settings # Constant key under which local settings are stored LOCAL_EXPERIMENTAL_KEY = "experimental_tools" @@ -89,9 +88,13 @@ class ExperimentalTools: "New publisher", "Combined creation and publishing into one tool.", self._show_publisher, - hosts_filter=["blender", "maya", "nuke", "celaction", "flame", - "fusion", "harmony", "hiero", "resolve", - "tvpaint", "unreal"] + hosts_filter=[ + "celaction", + "flame", + "harmony", + "hiero", + "resolve", + ] ) ] @@ -139,7 +142,7 @@ class ExperimentalTools: def get_tools_for_host(self, host_name=None): if not host_name: - host_name = os.environ.get("AVALON_APP") + host_name = os.environ.get("AYON_HOST_NAME") tools = [] for tool in self.tools: if ( @@ -151,7 +154,10 @@ class ExperimentalTools: def refresh_availability(self): """Reload local settings and check if any tool changed ability.""" - local_settings = get_local_settings() + + # NOTE AYON does not have implemented settings for experimental + # tools. + local_settings = {} experimental_settings = ( local_settings.get(LOCAL_EXPERIMENTAL_KEY) ) or {} diff --git a/client/ayon_core/tools/launcher/models/actions.py b/client/ayon_core/tools/launcher/models/actions.py index 37024b5810..6b9a33e57a 100644 --- a/client/ayon_core/tools/launcher/models/actions.py +++ b/client/ayon_core/tools/launcher/models/actions.py @@ -69,9 +69,9 @@ class ApplicationAction(LauncherAction): _log = None required_session_keys = ( - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK" + "AYON_PROJECT_NAME", + "AYON_FOLDER_PATH", + "AYON_TASK_NAME" ) @property @@ -85,7 +85,7 @@ class ApplicationAction(LauncherAction): if not session.get(key): return False - project_name = session["AVALON_PROJECT"] + project_name = session["AYON_PROJECT_NAME"] project_entity = self.project_entities[project_name] apps = project_entity["attrib"].get("applications") if not apps or self.application.full_name not in apps: @@ -119,13 +119,13 @@ class ApplicationAction(LauncherAction): ApplicationLaunchFailed, ) - project_name = session["AVALON_PROJECT"] - asset_name = session["AVALON_ASSET"] - task_name = session["AVALON_TASK"] + project_name = session["AYON_PROJECT_NAME"] + folder_path = session["AYON_FOLDER_PATH"] + task_name = session["AYON_TASK_NAME"] try: self.application.launch( project_name=project_name, - asset_name=asset_name, + folder_path=folder_path, task_name=task_name, **self.data ) @@ -293,6 +293,34 @@ class ActionsModel: self._get_action_objects() self._controller.emit_event("actions.refresh.finished") + def _should_start_last_workfile( + self, + project_name, + task_id, + identifier, + host_name, + not_open_workfile_actions + ): + from ayon_core.lib.applications import should_start_last_workfile + + if identifier in not_open_workfile_actions: + return not not_open_workfile_actions[identifier] + + task_name = None + task_type = None + if task_id is not None: + task = self._controller.get_task_entity(project_name, 
task_id) + task_name = task["name"] + task_type = task["taskType"] + + output = should_start_last_workfile( + project_name, + host_name, + task_name, + task_type + ) + return output + def get_action_items(self, project_name, folder_id, task_id): """Get actions for project. @@ -304,7 +332,6 @@ class ActionsModel: Returns: list[ActionItem]: List of actions. """ - not_open_workfile_actions = self._get_no_last_workfile_for_context( project_name, folder_id, task_id) session = self._prepare_session(project_name, folder_id, task_id) @@ -318,8 +345,15 @@ class ActionsModel: # Handling of 'force_not_open_workfile' for applications if action_item.is_application: action_item = action_item.copy() + start_last_workfile = self._should_start_last_workfile( + project_name, + task_id, + identifier, + action.application.host_name, + not_open_workfile_actions + ) action_item.force_not_open_workfile = ( - not_open_workfile_actions.get(identifier, False) + not start_last_workfile ) output.append(action_item) @@ -359,11 +393,15 @@ class ActionsModel: per_action = self._get_no_last_workfile_for_context( project_name, folder_id, task_id ) - force_not_open_workfile = per_action.get(identifier, False) - if force_not_open_workfile: - action.data["start_last_workfile"] = False - else: - action.data.pop("start_last_workfile", None) + start_last_workfile = self._should_start_last_workfile( + project_name, + task_id, + identifier, + action.application.host_name, + per_action + ) + action.data["start_last_workfile"] = start_last_workfile + action.process(session) except Exception as exc: self.log.warning("Action trigger failed.", exc_info=True) @@ -416,6 +454,10 @@ class ActionsModel: task_name = task["name"] return { + "AYON_PROJECT_NAME": project_name, + "AYON_FOLDER_PATH": folder_path, + "AYON_TASK_NAME": task_name, + # Deprecated - kept for backwards compatibility "AVALON_PROJECT": project_name, "AVALON_ASSET": folder_path, "AVALON_TASK": task_name, @@ -457,9 +499,11 @@ class ActionsModel: if is_application: action.project_entities[project_name] = project_entity action.project_settings[project_name] = project_settings + label = action.label or identifier variant_label = getattr(action, "label_variant", None) icon = get_action_icon(action) + item = ActionItem( identifier, label, diff --git a/client/ayon_core/tools/launcher/ui/actions_widget.py b/client/ayon_core/tools/launcher/ui/actions_widget.py index 6667b4ed5f..617f3b0c91 100644 --- a/client/ayon_core/tools/launcher/ui/actions_widget.py +++ b/client/ayon_core/tools/launcher/ui/actions_widget.py @@ -358,6 +358,8 @@ class ActionsWidget(QtWidgets.QWidget): def _on_model_refresh(self): self._proxy_model.sort(0) + # Force repaint all items + self._view.update() def _on_animation(self): time_now = time.time() diff --git a/client/ayon_core/tools/loader/control.py b/client/ayon_core/tools/loader/control.py index 29ca06e3e2..d2ee1d890c 100644 --- a/client/ayon_core/tools/loader/control.py +++ b/client/ayon_core/tools/loader/control.py @@ -320,7 +320,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): context = get_current_context() folder_id = None project_name = context.get("project_name") - asset_name = context.get("asset_name") + asset_name = context.get("folder_path") if project_name and asset_name: folder = ayon_api.get_folder_by_path( project_name, asset_name, fields=["id"] diff --git a/client/ayon_core/tools/loader/models/actions.py b/client/ayon_core/tools/loader/models/actions.py index c70ccb3e18..dff15ea16c 100644 --- 
a/client/ayon_core/tools/loader/models/actions.py +++ b/client/ayon_core/tools/loader/models/actions.py @@ -809,10 +809,10 @@ class LoaderActionsModel: error_info = [] if loader.is_multiple_contexts_compatible: - subset_names = [] + product_names = [] for context in version_contexts: - subset_name = context.get("subset", {}).get("name") or "N/A" - subset_names.append(subset_name) + product_name = context.get("subset", {}).get("name") or "N/A" + product_names.append(product_name) try: load_with_subset_contexts( loader, @@ -831,12 +831,12 @@ class LoaderActionsModel: str(exc), formatted_traceback, None, - ", ".join(subset_names), + ", ".join(product_names), None )) else: for version_context in version_contexts: - subset_name = ( + product_name = ( version_context.get("subset", {}).get("name") or "N/A" ) try: @@ -860,7 +860,7 @@ class LoaderActionsModel: str(exc), formatted_traceback, None, - subset_name, + product_name, None )) diff --git a/client/ayon_core/tools/loader/models/site_sync.py b/client/ayon_core/tools/loader/models/site_sync.py index e6158ea280..2a6f1558ad 100644 --- a/client/ayon_core/tools/loader/models/site_sync.py +++ b/client/ayon_core/tools/loader/models/site_sync.py @@ -325,21 +325,21 @@ class SiteSyncModel: repre_docs = list(get_representations( project_name, representation_ids=representation_ids )) - families_per_repre_id = { + product_type_by_repre_id = { item["_id"]: item["context"]["family"] for item in repre_docs } for repre_id in representation_ids: - family = families_per_repre_id[repre_id] + product_type = product_type_by_repre_id[repre_id] if identifier == DOWNLOAD_IDENTIFIER: self._add_site( - project_name, repre_id, active_site, family + project_name, repre_id, active_site, product_type ) elif identifier == UPLOAD_IDENTIFIER: self._add_site( - project_name, repre_id, remote_site, family + project_name, repre_id, remote_site, product_type ) elif identifier == REMOVE_IDENTIFIER: @@ -485,13 +485,13 @@ class SiteSyncModel: representation_ids=representation_ids, ) - def _add_site(self, project_name, repre_id, site_name, family): + def _add_site(self, project_name, repre_id, site_name, product_type): self._site_sync_addon.add_site( project_name, repre_id, site_name, force=True ) # TODO this should happen in site sync addon - if family != "workfile": + if product_type != "workfile": return links = get_linked_representation_id( diff --git a/client/ayon_core/tools/loader/ui/folders_widget.py b/client/ayon_core/tools/loader/ui/folders_widget.py index 9d5b95b2a6..34881ab49d 100644 --- a/client/ayon_core/tools/loader/ui/folders_widget.py +++ b/client/ayon_core/tools/loader/ui/folders_widget.py @@ -56,34 +56,34 @@ class UnderlinesFolderDelegate(QtWidgets.QItemDelegate): item_rect = QtCore.QRect(option.rect) item_rect.setHeight(option.rect.height() - self.bar_height) - subset_colors = index.data(UNDERLINE_COLORS_ROLE) or [] + product_colors = index.data(UNDERLINE_COLORS_ROLE) or [] - subset_colors_width = 0 - if subset_colors: - subset_colors_width = option.rect.width() / len(subset_colors) + product_colors_width = 0 + if product_colors: + product_colors_width = option.rect.width() / len(product_colors) - subset_rects = [] + product_rects = [] counter = 0 - for subset_c in subset_colors: + for product_c in product_colors: new_color = None new_rect = None - if subset_c: - new_color = QtGui.QColor(subset_c) + if product_c: + new_color = QtGui.QColor(product_c) new_rect = QtCore.QRect( - option.rect.left() + (counter * subset_colors_width), + option.rect.left() + (counter * 
product_colors_width), option.rect.top() + ( option.rect.height() - self.bar_height ), - subset_colors_width, + product_colors_width, self.bar_height ) - subset_rects.append((new_color, new_rect)) + product_rects.append((new_color, new_rect)) counter += 1 # Background if option.state & QtWidgets.QStyle.State_Selected: - if len(subset_colors) == 0: + if len(product_colors) == 0: item_rect.setTop(item_rect.top() + (self.bar_height / 2)) if option.state & QtWidgets.QStyle.State_MouseOver: @@ -106,10 +106,10 @@ class UnderlinesFolderDelegate(QtWidgets.QItemDelegate): ) if option.state & QtWidgets.QStyle.State_Selected: - for color, subset_rect in subset_rects: - if not color or not subset_rect: + for color, product_rect in product_rects: + if not color or not product_rect: continue - painter.fillRect(subset_rect, QtGui.QBrush(color)) + painter.fillRect(product_rect, QtGui.QBrush(color)) # Icon icon_index = index.model().index( diff --git a/client/ayon_core/tools/loader/ui/products_widget.py b/client/ayon_core/tools/loader/ui/products_widget.py index 5a29f3f762..3025ec18bd 100644 --- a/client/ayon_core/tools/loader/ui/products_widget.py +++ b/client/ayon_core/tools/loader/ui/products_widget.py @@ -106,7 +106,7 @@ class ProductsWidget(QtWidgets.QWidget): products_view = DeselectableTreeView(self) # TODO - define custom object name in style - products_view.setObjectName("SubsetView") + products_view.setObjectName("ProductView") products_view.setSelectionMode( QtWidgets.QAbstractItemView.ExtendedSelection ) diff --git a/client/ayon_core/tools/publisher/constants.py b/client/ayon_core/tools/publisher/constants.py index 4630eb144b..6676f14c3d 100644 --- a/client/ayon_core/tools/publisher/constants.py +++ b/client/ayon_core/tools/publisher/constants.py @@ -6,9 +6,9 @@ CONTEXT_LABEL = "Context" # Not showed anywhere - used as identifier CONTEXT_GROUP = "__ContextGroup__" -CONVERTOR_ITEM_GROUP = "Incompatible subsets" +CONVERTOR_ITEM_GROUP = "Incompatible products" -# Allowed symbols for subset name (and variant) +# Allowed symbols for product name (and variant) # - characters, numbers, unsercore and dash VARIANT_TOOLTIP = ( "Variant may contain alphabetical characters (a-Z)" @@ -24,7 +24,7 @@ SORT_VALUE_ROLE = QtCore.Qt.UserRole + 2 IS_GROUP_ROLE = QtCore.Qt.UserRole + 3 CREATOR_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 4 CREATOR_THUMBNAIL_ENABLED_ROLE = QtCore.Qt.UserRole + 5 -FAMILY_ROLE = QtCore.Qt.UserRole + 6 +PRODUCT_TYPE_ROLE = QtCore.Qt.UserRole + 6 GROUP_ROLE = QtCore.Qt.UserRole + 7 CONVERTER_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 8 CREATOR_SORT_ROLE = QtCore.Qt.UserRole + 9 @@ -48,7 +48,7 @@ __all__ = ( "CREATOR_IDENTIFIER_ROLE", "CREATOR_THUMBNAIL_ENABLED_ROLE", "CREATOR_SORT_ROLE", - "FAMILY_ROLE", + "PRODUCT_TYPE_ROLE", "GROUP_ROLE", "CONVERTER_IDENTIFIER_ROLE", diff --git a/client/ayon_core/tools/publisher/control.py b/client/ayon_core/tools/publisher/control.py index 988362fee4..712142f662 100644 --- a/client/ayon_core/tools/publisher/control.py +++ b/client/ayon_core/tools/publisher/control.py @@ -12,14 +12,13 @@ from abc import ABCMeta, abstractmethod import six import arrow import pyblish.api +import ayon_api from ayon_core.client import ( - get_assets, - get_asset_by_id, + get_asset_by_name, get_subsets, - get_asset_name_identifier, ) -from ayon_core.lib.events import EventSystem +from ayon_core.lib.events import QueuedEventSystem from ayon_core.lib.attribute_definitions import ( UIDef, serialize_attr_defs, @@ -43,6 +42,7 @@ from ayon_core.pipeline.create.context import ( 
ConvertorsOperationFailed, ) from ayon_core.pipeline.publish import get_publish_instance_label +from ayon_core.tools.ayon_utils.models import HierarchyModel # Define constant for plugin orders offset PLUGIN_ORDER_OFFSET = 0.5 @@ -69,103 +69,19 @@ class MainThreadItem: class AssetDocsCache: """Cache asset documents for creation part.""" - projection = { - "_id": True, - "name": True, - "data.visualParent": True, - "data.tasks": True, - "data.parents": True, - } - def __init__(self, controller): self._controller = controller - self._asset_docs = None - self._asset_docs_hierarchy = None - self._task_names_by_asset_name = {} - self._asset_docs_by_name = {} - self._full_asset_docs_by_name = {} + self._asset_docs_by_path = {} def reset(self): - self._asset_docs = None - self._asset_docs_hierarchy = None - self._task_names_by_asset_name = {} - self._asset_docs_by_name = {} - self._full_asset_docs_by_name = {} + self._asset_docs_by_path = {} - def _query(self): - if self._asset_docs is not None: - return - - project_name = self._controller.project_name - asset_docs = list(get_assets( - project_name, fields=self.projection.keys() - )) - asset_docs_by_name = {} - task_names_by_asset_name = {} - for asset_doc in asset_docs: - if "data" not in asset_doc: - asset_doc["data"] = {"tasks": {}, "visualParent": None} - elif "tasks" not in asset_doc["data"]: - asset_doc["data"]["tasks"] = {} - - asset_name = get_asset_name_identifier(asset_doc) - asset_tasks = asset_doc["data"]["tasks"] - task_names_by_asset_name[asset_name] = list(asset_tasks.keys()) - asset_docs_by_name[asset_name] = asset_doc - - self._asset_docs = asset_docs - self._asset_docs_by_name = asset_docs_by_name - self._task_names_by_asset_name = task_names_by_asset_name - - def get_asset_docs(self): - self._query() - return copy.deepcopy(self._asset_docs) - - def get_asset_hierarchy(self): - """Prepare asset documents into hierarchy. - - Convert ObjectId to string. Asset id is not used during whole - process of publisher but asset name is used rather. - - Returns: - Dict[Union[str, None]: Any]: Mapping of parent id to it's children. - Top level assets have parent id 'None'. 
- """ - - if self._asset_docs_hierarchy is None: - _queue = collections.deque(self.get_asset_docs()) - - output = collections.defaultdict(list) - while _queue: - asset_doc = _queue.popleft() - asset_doc["_id"] = str(asset_doc["_id"]) - parent_id = asset_doc["data"]["visualParent"] - if parent_id is not None: - parent_id = str(parent_id) - asset_doc["data"]["visualParent"] = parent_id - output[parent_id].append(asset_doc) - self._asset_docs_hierarchy = output - return copy.deepcopy(self._asset_docs_hierarchy) - - def get_task_names_by_asset_name(self): - self._query() - return copy.deepcopy(self._task_names_by_asset_name) - - def get_asset_by_name(self, asset_name): - self._query() - asset_doc = self._asset_docs_by_name.get(asset_name) - if asset_doc is None: - return None - return copy.deepcopy(asset_doc) - - def get_full_asset_by_name(self, asset_name): - self._query() - if asset_name not in self._full_asset_docs_by_name: - asset_doc = self._asset_docs_by_name.get(asset_name) + def get_asset_doc_by_folder_path(self, folder_path): + if folder_path not in self._asset_docs_by_path: project_name = self._controller.project_name - full_asset_doc = get_asset_by_id(project_name, asset_doc["_id"]) - self._full_asset_docs_by_name[asset_name] = full_asset_doc - return copy.deepcopy(self._full_asset_docs_by_name[asset_name]) + asset_doc = get_asset_by_name(project_name, folder_path) + self._asset_docs_by_path[folder_path] = asset_doc + return copy.deepcopy(self._asset_docs_by_path[folder_path]) class PublishReportMaker: @@ -351,7 +267,8 @@ class PublishReportMaker: return { "name": instance.data.get("name"), "label": get_publish_instance_label(instance), - "family": instance.data["family"], + "product_type": instance.data.get("productType"), + "family": instance.data.get("family"), "families": instance.data.get("families") or [], "exists": exists, "creator_identifier": instance.data.get("creator_identifier"), @@ -879,7 +796,7 @@ class CreatorItem: self, identifier, creator_type, - family, + product_type, label, group_label, icon, @@ -894,7 +811,7 @@ class CreatorItem: ): self.identifier = identifier self.creator_type = creator_type - self.family = family + self.product_type = product_type self.label = label self.group_label = group_label self.icon = icon @@ -943,7 +860,7 @@ class CreatorItem: return cls( identifier, creator_type, - creator.family, + creator.product_type, creator.label or identifier, creator.get_group_label(), creator.get_icon(), @@ -967,7 +884,7 @@ class CreatorItem: return { "identifier": self.identifier, "creator_type": str(self.creator_type), - "family": self.family, + "product_type": self.product_type, "label": self.label, "group_label": self.group_label, "icon": self.icon, @@ -1035,13 +952,13 @@ class AbstractPublisherController(object): @property @abstractmethod - def current_asset_name(self): - """Current context asset name. + def current_folder_path(self): + """Current context folder path. Returns: - Union[str, None]: Name of asset. - """ + Union[str, None]: Folder path. 
+ """ pass @property @@ -1105,19 +1022,7 @@ class AbstractPublisherController(object): pass @abstractmethod - def get_asset_docs(self): - pass - - @abstractmethod - def get_asset_hierarchy(self): - pass - - @abstractmethod - def get_task_names_by_asset_names(self, asset_names): - pass - - @abstractmethod - def get_existing_subset_names(self, asset_name): + def get_existing_product_names(self, folder_path): pass @abstractmethod @@ -1152,23 +1057,23 @@ class AbstractPublisherController(object): pass @abstractmethod - def get_subset_name( + def get_product_name( self, creator_identifier, variant, task_name, - asset_name, + folder_path, instance_id=None ): - """Get subset name based on passed data. + """Get product name based on passed data. Args: creator_identifier (str): Identifier of creator which should be - responsible for subset name creation. + responsible for product name creation. variant (str): Variant value from user's input. task_name (str): Name of task for which is instance created. - asset_name (str): Name of asset for which is instance created. - instance_id (Union[str, None]): Existing instance id when subset + folder_path (str): Folder path for which is instance created. + instance_id (Union[str, None]): Existing instance id when product name is updated. """ @@ -1176,7 +1081,7 @@ class AbstractPublisherController(object): @abstractmethod def create( - self, creator_identifier, subset_name, instance_data, options + self, creator_identifier, product_name, instance_data, options ): """Trigger creation by creator identifier. @@ -1184,9 +1089,9 @@ class AbstractPublisherController(object): Args: creator_identifier (str): Identifier of Creator plugin. - subset_name (str): Calculated subset name. + product_name (str): Calculated product name. instance_data (Dict[str, Any]): Base instance data with variant, - asset name and task name. + folder path and task name. options (Dict[str, Any]): Data from pre-create attributes. """ @@ -1499,13 +1404,22 @@ class BasePublisherController(AbstractPublisherController): """ if self._event_system is None: - self._event_system = EventSystem() + self._event_system = QueuedEventSystem() return self._event_system - def _emit_event(self, topic, data=None): + # Events system + def emit_event(self, topic, data=None, source=None): + """Use implemented event system to trigger event.""" + if data is None: data = {} - self.event_system.emit(topic, data, "controller") + self.event_system.emit(topic, data, source) + + def register_event_callback(self, topic, callback): + self.event_system.add_callback(topic, callback) + + def _emit_event(self, topic, data=None): + self.emit_event(topic, data, "controller") def _get_host_is_valid(self): return self._host_is_valid @@ -1738,6 +1652,7 @@ class PublisherController(BasePublisherController): self._resetting_instances = False # Cacher of avalon documents + self._hierarchy_model = HierarchyModel(self) self._asset_docs_cache = AssetDocsCache(self) @property @@ -1751,11 +1666,11 @@ class PublisherController(BasePublisherController): return self._create_context.get_current_project_name() @property - def current_asset_name(self): - """Current context asset name defined by host. + def current_folder_path(self): + """Current context folder path defined by host. Returns: - Union[str, None]: Asset name or None if asset is not set. + Union[str, None]: Folder path or None if folder is not set. 
""" return self._create_context.get_current_asset_name() @@ -1794,11 +1709,69 @@ class PublisherController(BasePublisherController): """Publish plugins.""" return self._create_context.publish_plugins - # --- Publish specific callbacks --- - def get_asset_docs(self): - """Get asset documents from cache for whole project.""" - return self._asset_docs_cache.get_asset_docs() + # Hierarchy model + def get_folder_items(self, project_name, sender=None): + return self._hierarchy_model.get_folder_items(project_name, sender) + def get_task_items(self, project_name, folder_id, sender=None): + return self._hierarchy_model.get_task_items( + project_name, folder_id, sender + ) + + def get_folder_entity(self, project_name, folder_id): + return self._hierarchy_model.get_folder_entity( + project_name, folder_id + ) + + def get_task_entity(self, project_name, task_id): + return self._hierarchy_model.get_task_entity(project_name, task_id) + + # Publisher custom method + def get_folder_id_from_path(self, folder_path): + if not folder_path: + return None + folder_item = self._hierarchy_model.get_folder_item_by_path( + self.project_name, folder_path + ) + if folder_item: + return folder_item.entity_id + return None + + def get_task_names_by_folder_paths(self, folder_paths): + if not folder_paths: + return {} + folder_items = self._hierarchy_model.get_folder_items_by_paths( + self.project_name, folder_paths + ) + output = { + folder_path: set() + for folder_path in folder_paths + } + project_name = self.project_name + for folder_item in folder_items.values(): + task_items = self._hierarchy_model.get_task_items( + project_name, folder_item.entity_id, None + ) + output[folder_item.path] = { + task_item.name + for task_item in task_items + } + + return output + + def are_folder_paths_valid(self, folder_paths): + if not folder_paths: + return True + folder_paths = set(folder_paths) + folder_items = self._hierarchy_model.get_folder_items_by_paths( + self.project_name, folder_paths + ) + for folder_item in folder_items.values(): + if folder_item is None: + return False + return True + + # --- Publish specific callbacks --- def get_context_title(self): """Get context title for artist shown at the top of main window.""" @@ -1807,38 +1780,26 @@ class PublisherController(BasePublisherController): context_title = self._host.get_context_title() if context_title is None: - context_title = os.environ.get("AVALON_APP_NAME") + context_title = os.environ.get("AYON_APP_NAME") if context_title is None: - context_title = os.environ.get("AVALON_APP") + context_title = os.environ.get("AYON_HOST_NAME") return context_title - def get_asset_hierarchy(self): - """Prepare asset documents into hierarchy.""" - - return self._asset_docs_cache.get_asset_hierarchy() - - def get_task_names_by_asset_names(self, asset_names): - """Prepare task names by asset name.""" - task_names_by_asset_name = ( - self._asset_docs_cache.get_task_names_by_asset_name() - ) - result = {} - for asset_name in asset_names: - result[asset_name] = set( - task_names_by_asset_name.get(asset_name) or [] - ) - return result - - def get_existing_subset_names(self, asset_name): + def get_existing_product_names(self, folder_path): + if not folder_path: + return None project_name = self.project_name - asset_doc = self._asset_docs_cache.get_asset_by_name(asset_name) - if not asset_doc: + folder_item = self._hierarchy_model.get_folder_item_by_path( + project_name, folder_path + ) + if not folder_item: return None - asset_id = asset_doc["_id"] subset_docs = get_subsets( - 
project_name, asset_ids=[asset_id], fields=["name"] + project_name, + asset_ids=[folder_item.entity_id], + fields=["name"] ) return { subset_doc["name"] @@ -1858,6 +1819,7 @@ class PublisherController(BasePublisherController): # Reset avalon context self._create_context.reset_current_context() + self._hierarchy_model.reset() self._asset_docs_cache.reset() self._reset_plugins() @@ -1927,7 +1889,7 @@ class PublisherController(BasePublisherController): self._emit_event( "convertors.find.failed", { - "title": "Collection of unsupported subset failed", + "title": "Collection of unsupported product failed", "failed_info": exc.failed_info } ) @@ -2069,35 +2031,37 @@ class PublisherController(BasePublisherController): )) return output - def get_subset_name( + def get_product_name( self, creator_identifier, variant, task_name, - asset_name, + folder_path, instance_id=None ): - """Get subset name based on passed data. + """Get product name based on passed data. Args: creator_identifier (str): Identifier of creator which should be - responsible for subset name creation. + responsible for product name creation. variant (str): Variant value from user's input. task_name (str): Name of task for which is instance created. - asset_name (str): Name of asset for which is instance created. - instance_id (Union[str, None]): Existing instance id when subset + folder_path (str): Folder path for which is instance created. + instance_id (Union[str, None]): Existing instance id when product name is updated. """ creator = self._creators[creator_identifier] project_name = self.project_name - asset_doc = self._asset_docs_cache.get_full_asset_by_name(asset_name) + asset_doc = self._asset_docs_cache.get_asset_doc_by_folder_path( + folder_path + ) instance = None if instance_id: instance = self.instances[instance_id] - return creator.get_subset_name( - variant, task_name, asset_doc, project_name, instance=instance + return creator.get_product_name( + project_name, asset_doc, task_name, variant, instance=instance ) def trigger_convertor_items(self, convertor_identifiers): @@ -2133,14 +2097,14 @@ class PublisherController(BasePublisherController): self.reset() def create( - self, creator_identifier, subset_name, instance_data, options + self, creator_identifier, product_name, instance_data, options ): """Trigger creation and refresh of instances in UI.""" success = True try: self._create_context.create_with_unified_error( - creator_identifier, subset_name, instance_data, options + creator_identifier, product_name, instance_data, options ) except CreatorsOperationFailed as exc: @@ -2335,7 +2299,11 @@ class PublisherController(BasePublisherController): "title": "Action failed", "message": "Action failed.", "traceback": "".join( - traceback.format_exception(exception) + traceback.format_exception( + type(exception), + exception, + exception.__traceback__ + ) ), "label": action.__name__, "identifier": action.id diff --git a/client/ayon_core/tools/publisher/control_qt.py b/client/ayon_core/tools/publisher/control_qt.py index 3d56c08131..ee08899cac 100644 --- a/client/ayon_core/tools/publisher/control_qt.py +++ b/client/ayon_core/tools/publisher/control_qt.py @@ -212,13 +212,13 @@ class QtRemotePublishController(BasePublisherController): pass @abstractproperty - def current_asset_name(self): - """Current context asset name from client. + def current_folder_path(self): + """Current context folder path from host. Returns: - Union[str, None]: Name of asset. - """ + Union[str, None]: Folder path. 
+ """ pass @abstractproperty @@ -251,16 +251,7 @@ class QtRemotePublishController(BasePublisherController): pass - def get_asset_docs(self): - pass - - def get_asset_hierarchy(self): - pass - - def get_task_names_by_asset_names(self, asset_names): - pass - - def get_existing_subset_names(self, asset_name): + def get_existing_product_names(self, folder_path): pass @property @@ -300,23 +291,23 @@ class QtRemotePublishController(BasePublisherController): pass @abstractmethod - def get_subset_name( + def get_product_name( self, creator_identifier, variant, task_name, - asset_name, + folder_path, instance_id=None ): - """Get subset name based on passed data. + """Get product name based on passed data. Args: creator_identifier (str): Identifier of creator which should be - responsible for subset name creation. + responsible for product name creation. variant (str): Variant value from user's input. task_name (str): Name of task for which is instance created. - asset_name (str): Name of asset for which is instance created. - instance_id (Union[str, None]): Existing instance id when subset + folder_path (str): Folder path for which is instance created. + instance_id (Union[str, None]): Existing instance id when product name is updated. """ @@ -324,7 +315,7 @@ class QtRemotePublishController(BasePublisherController): @abstractmethod def create( - self, creator_identifier, subset_name, instance_data, options + self, creator_identifier, product_name, instance_data, options ): """Trigger creation by creator identifier. @@ -332,9 +323,9 @@ class QtRemotePublishController(BasePublisherController): Args: creator_identifier (str): Identifier of Creator plugin. - subset_name (str): Calculated subset name. + product_name (str): Calculated product name. instance_data (Dict[str, Any]): Base instance data with variant, - asset name and task name. + folder path and task name. options (Dict[str, Any]): Data from pre-create attributes. """ diff --git a/client/ayon_core/tools/publisher/widgets/assets_widget.py b/client/ayon_core/tools/publisher/widgets/assets_widget.py deleted file mode 100644 index 8a72c03e8b..0000000000 --- a/client/ayon_core/tools/publisher/widgets/assets_widget.py +++ /dev/null @@ -1,360 +0,0 @@ -import collections - -from qtpy import QtWidgets, QtCore, QtGui - -from ayon_core.tools.utils import ( - PlaceholderLineEdit, - RecursiveSortFilterProxyModel, - get_asset_icon, -) -from ayon_core.tools.utils.assets_widget import ( - SingleSelectAssetsWidget, - ASSET_ID_ROLE, - ASSET_NAME_ROLE, - ASSET_PATH_ROLE, -) - - -class CreateWidgetAssetsWidget(SingleSelectAssetsWidget): - current_context_required = QtCore.Signal() - header_height_changed = QtCore.Signal(int) - - def __init__(self, controller, parent): - self._controller = controller - super(CreateWidgetAssetsWidget, self).__init__(None, parent) - - self.set_refresh_btn_visibility(False) - self.set_current_asset_btn_visibility(False) - - self._last_selection = None - self._enabled = None - - self._last_filter_height = None - - def get_selected_asset_name(self): - selection_model = self._view.selectionModel() - indexes = selection_model.selectedRows() - for index in indexes: - return index.data(ASSET_PATH_ROLE) - return None - - def _check_header_height(self): - """Catch header height changes. - - Label on top of creaters should have same height so Creators view has - same offset. 
- """ - height = self.header_widget.height() - if height != self._last_filter_height: - self._last_filter_height = height - self.header_height_changed.emit(height) - - def resizeEvent(self, event): - super(CreateWidgetAssetsWidget, self).resizeEvent(event) - self._check_header_height() - - def showEvent(self, event): - super(CreateWidgetAssetsWidget, self).showEvent(event) - self._check_header_height() - - def _on_current_asset_click(self): - self.current_context_required.emit() - - def set_enabled(self, enabled): - if self._enabled == enabled: - return - self._enabled = enabled - if not enabled: - self._last_selection = self.get_selected_asset_id() - self._clear_selection() - elif self._last_selection is not None: - self.select_asset(self._last_selection) - - def _select_indexes(self, *args, **kwargs): - super(CreateWidgetAssetsWidget, self)._select_indexes(*args, **kwargs) - if self._enabled: - return - self._last_selection = self.get_selected_asset_id() - self._clear_selection() - - def update_current_asset(self): - # Hide set current asset if there is no one - asset_name = self._get_current_session_asset() - self.set_current_asset_btn_visibility(bool(asset_name)) - - def _get_current_session_asset(self): - return self._controller.current_asset_name - - def _create_source_model(self): - return AssetsHierarchyModel(self._controller) - - def _refresh_model(self): - self._model.reset() - self._on_model_refresh(self._model.rowCount() > 0) - - -class AssetsHierarchyModel(QtGui.QStandardItemModel): - """Assets hierarchy model. - - For selecting asset for which an instance should be created. - - Uses controller to load asset hierarchy. All asset documents are stored by - their parents. - """ - - def __init__(self, controller): - super(AssetsHierarchyModel, self).__init__() - self._controller = controller - - self._items_by_name = {} - self._items_by_path = {} - self._items_by_asset_id = {} - - def reset(self): - self.clear() - - self._items_by_name = {} - self._items_by_path = {} - self._items_by_asset_id = {} - assets_by_parent_id = self._controller.get_asset_hierarchy() - - items_by_name = {} - items_by_path = {} - items_by_asset_id = {} - _queue = collections.deque() - _queue.append((self.invisibleRootItem(), None, None)) - while _queue: - parent_item, parent_id, parent_path = _queue.popleft() - children = assets_by_parent_id.get(parent_id) - if not children: - continue - - children_by_name = { - child["name"]: child - for child in children - } - items = [] - for name in sorted(children_by_name.keys()): - child = children_by_name[name] - child_id = child["_id"] - if parent_path: - child_path = "{}/{}".format(parent_path, name) - else: - child_path = "/{}".format(name) - - has_children = bool(assets_by_parent_id.get(child_id)) - icon = get_asset_icon(child, has_children) - - item = QtGui.QStandardItem(name) - item.setFlags( - QtCore.Qt.ItemIsEnabled - | QtCore.Qt.ItemIsSelectable - ) - item.setData(icon, QtCore.Qt.DecorationRole) - item.setData(child_id, ASSET_ID_ROLE) - item.setData(name, ASSET_NAME_ROLE) - item.setData(child_path, ASSET_PATH_ROLE) - - items_by_name[name] = item - items_by_path[child_path] = item - items_by_asset_id[child_id] = item - items.append(item) - _queue.append((item, child_id, child_path)) - - parent_item.appendRows(items) - - self._items_by_name = items_by_name - self._items_by_path = items_by_path - self._items_by_asset_id = items_by_asset_id - - def get_index_by_asset_id(self, asset_id): - item = self._items_by_asset_id.get(asset_id) - if item is not None: - 
return item.index() - return QtCore.QModelIndex() - - def get_index_by_asset_name(self, asset_name): - item = self._items_by_path.get(asset_name) - if item is None: - item = self._items_by_name.get(asset_name) - - if item is None: - return QtCore.QModelIndex() - return item.index() - - def name_is_valid(self, item_name): - return item_name in self._items_by_path - - -class AssetDialogView(QtWidgets.QTreeView): - double_clicked = QtCore.Signal(QtCore.QModelIndex) - - def mouseDoubleClickEvent(self, event): - index = self.indexAt(event.pos()) - if index.isValid(): - self.double_clicked.emit(index) - event.accept() - - -class AssetsDialog(QtWidgets.QDialog): - """Dialog to select asset for a context of instance.""" - - def __init__(self, controller, parent): - super(AssetsDialog, self).__init__(parent) - self.setWindowTitle("Select asset") - - model = AssetsHierarchyModel(controller) - proxy_model = RecursiveSortFilterProxyModel() - proxy_model.setSourceModel(model) - proxy_model.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive) - - filter_input = PlaceholderLineEdit(self) - filter_input.setPlaceholderText("Filter folders..") - - asset_view = AssetDialogView(self) - asset_view.setModel(proxy_model) - asset_view.setHeaderHidden(True) - asset_view.setFrameShape(QtWidgets.QFrame.NoFrame) - asset_view.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers) - asset_view.setAlternatingRowColors(True) - asset_view.setSelectionBehavior(QtWidgets.QTreeView.SelectRows) - asset_view.setAllColumnsShowFocus(True) - - ok_btn = QtWidgets.QPushButton("OK", self) - cancel_btn = QtWidgets.QPushButton("Cancel", self) - - btns_layout = QtWidgets.QHBoxLayout() - btns_layout.addStretch(1) - btns_layout.addWidget(ok_btn) - btns_layout.addWidget(cancel_btn) - - layout = QtWidgets.QVBoxLayout(self) - layout.addWidget(filter_input, 0) - layout.addWidget(asset_view, 1) - layout.addLayout(btns_layout, 0) - - controller.event_system.add_callback( - "controller.reset.finished", self._on_controller_reset - ) - - asset_view.double_clicked.connect(self._on_ok_clicked) - filter_input.textChanged.connect(self._on_filter_change) - ok_btn.clicked.connect(self._on_ok_clicked) - cancel_btn.clicked.connect(self._on_cancel_clicked) - - self._filter_input = filter_input - self._ok_btn = ok_btn - self._cancel_btn = cancel_btn - - self._model = model - self._proxy_model = proxy_model - - self._asset_view = asset_view - - self._selected_asset = None - # Soft refresh is enabled - # - reset will happen at all cost if soft reset is enabled - # - adds ability to call reset on multiple places without repeating - self._soft_reset_enabled = True - - self._first_show = True - self._default_height = 500 - - def _on_first_show(self): - center = self.rect().center() - size = self.size() - size.setHeight(self._default_height) - - self.resize(size) - new_pos = self.mapToGlobal(center) - new_pos.setX(new_pos.x() - int(self.width() / 2)) - new_pos.setY(new_pos.y() - int(self.height() / 2)) - self.move(new_pos) - - def _on_controller_reset(self): - # Change reset enabled so model is reset on show event - self._soft_reset_enabled = True - - def showEvent(self, event): - """Refresh asset model on show.""" - super(AssetsDialog, self).showEvent(event) - if self._first_show: - self._first_show = False - self._on_first_show() - # Refresh on show - self.reset(False) - - def reset(self, force=True): - """Reset asset model.""" - if not force and not self._soft_reset_enabled: - return - - if self._soft_reset_enabled: - self._soft_reset_enabled = False - - 
self._model.reset() - - def name_is_valid(self, name): - """Is asset name valid. - - Args: - name(str): Asset name that should be checked. - """ - # Make sure we're reset - self.reset(False) - # Valid the name by model - return self._model.name_is_valid(name) - - def _on_filter_change(self, text): - """Trigger change of filter of assets.""" - self._proxy_model.setFilterFixedString(text) - - def _on_cancel_clicked(self): - self.done(0) - - def _on_ok_clicked(self): - index = self._asset_view.currentIndex() - asset_name = None - if index.isValid(): - asset_name = index.data(ASSET_PATH_ROLE) - self._selected_asset = asset_name - self.done(1) - - def set_selected_assets(self, asset_names): - """Change preselected asset before showing the dialog. - - This also resets model and clean filter. - """ - self.reset(False) - self._asset_view.collapseAll() - self._filter_input.setText("") - - indexes = [] - for asset_name in asset_names: - index = self._model.get_index_by_asset_name(asset_name) - if index.isValid(): - indexes.append(index) - - if not indexes: - return - - index_deque = collections.deque() - for index in indexes: - index_deque.append(index) - - all_indexes = [] - while index_deque: - index = index_deque.popleft() - all_indexes.append(index) - - parent_index = index.parent() - if parent_index.isValid(): - index_deque.append(parent_index) - - for index in all_indexes: - proxy_index = self._proxy_model.mapFromSource(index) - self._asset_view.expand(proxy_index) - - def get_selected_asset(self): - """Get selected asset name.""" - return self._selected_asset diff --git a/client/ayon_core/tools/publisher/widgets/card_view_widgets.py b/client/ayon_core/tools/publisher/widgets/card_view_widgets.py index 3396110121..47c5399cf7 100644 --- a/client/ayon_core/tools/publisher/widgets/card_view_widgets.py +++ b/client/ayon_core/tools/publisher/widgets/card_view_widgets.py @@ -179,7 +179,7 @@ class ConvertorItemsGroupWidget(BaseGroupWidget): # Remove instance widgets that are not in passed instances self._remove_all_except(items_by_id.keys()) - # Sort instances by subset name + # Sort instances by product name sorted_labels = list(sorted(items_by_label.keys())) # Add new instances to widget @@ -226,24 +226,24 @@ class InstanceGroupWidget(BaseGroupWidget): CreateContext. 
""" - # Store instances by id and by subset name + # Store instances by id and by product name instances_by_id = {} - instances_by_subset_name = collections.defaultdict(list) + instances_by_product_name = collections.defaultdict(list) for instance in instances: instances_by_id[instance.id] = instance - subset_name = instance["subset"] - instances_by_subset_name[subset_name].append(instance) + product_name = instance["productName"] + instances_by_product_name[product_name].append(instance) # Remove instance widgets that are not in passed instances self._remove_all_except(instances_by_id.keys()) - # Sort instances by subset name - sorted_subset_names = list(sorted(instances_by_subset_name.keys())) + # Sort instances by product name + sorted_product_names = list(sorted(instances_by_product_name.keys())) # Add new instances to widget widget_idx = 1 - for subset_names in sorted_subset_names: - for instance in instances_by_subset_name[subset_names]: + for product_names in sorted_product_names: + for instance in instances_by_product_name[product_names]: if instance.id in self._widgets_by_id: widget = self._widgets_by_id[instance.id] widget.update_instance(instance) @@ -326,7 +326,7 @@ class ContextCardWidget(CardWidget): self._group_identifier = CONTEXT_GROUP icon_widget = PublishPixmapLabel(None, self) - icon_widget.setObjectName("FamilyIconLabel") + icon_widget.setObjectName("ProductTypeIconLabel") label_widget = QtWidgets.QLabel(CONTEXT_LABEL, self) @@ -357,7 +357,7 @@ class ConvertorItemCardWidget(CardWidget): self._group_identifier = CONVERTOR_ITEM_GROUP icon_widget = IconValuePixmapLabel("fa.magic", self) - icon_widget.setObjectName("FamilyIconLabel") + icon_widget.setObjectName("ProductTypeIconLabel") label_widget = QtWidgets.QLabel(item.label, self) @@ -391,12 +391,12 @@ class InstanceCardWidget(CardWidget): self.instance = instance - self._last_subset_name = None + self._last_product_name = None self._last_variant = None self._last_label = None icon_widget = IconValuePixmapLabel(group_icon, self) - icon_widget.setObjectName("FamilyIconLabel") + icon_widget.setObjectName("ProductTypeIconLabel") context_warning = ContextWarningLabel(self) icon_layout = QtWidgets.QHBoxLayout() @@ -475,19 +475,19 @@ class InstanceCardWidget(CardWidget): self._icon_widget.setVisible(valid) self._context_warning.setVisible(not valid) - def _update_subset_name(self): + def _update_product_name(self): variant = self.instance["variant"] - subset_name = self.instance["subset"] + product_name = self.instance["productName"] label = self.instance.label if ( variant == self._last_variant - and subset_name == self._last_subset_name + and product_name == self._last_product_name and label == self._last_label ): return self._last_variant = variant - self._last_subset_name = subset_name + self._last_product_name = product_name self._last_label = label # Make `variant` bold label = html_escape(self.instance.label) @@ -506,7 +506,7 @@ class InstanceCardWidget(CardWidget): def update_instance_values(self): """Update instance data""" - self._update_subset_name() + self._update_product_name() self.set_active(self.instance["active"]) self._validate_context() diff --git a/client/ayon_core/tools/publisher/widgets/create_context_widgets.py b/client/ayon_core/tools/publisher/widgets/create_context_widgets.py new file mode 100644 index 0000000000..d65a2ace8d --- /dev/null +++ b/client/ayon_core/tools/publisher/widgets/create_context_widgets.py @@ -0,0 +1,296 @@ +from qtpy import QtWidgets, QtCore, QtGui + +from ayon_core.lib.events 
import QueuedEventSystem +from ayon_core.tools.utils import PlaceholderLineEdit, GoToCurrentButton + +from ayon_core.tools.ayon_utils.models import HierarchyExpectedSelection +from ayon_core.tools.ayon_utils.widgets import FoldersWidget, TasksWidget + + +class CreateSelectionModel(object): + """Model handling selection changes. + + Triggering events: + - "selection.project.changed" + - "selection.folder.changed" + - "selection.task.changed" + """ + + event_source = "publisher.create.selection.model" + + def __init__(self, controller): + self._controller = controller + + self._project_name = None + self._folder_id = None + self._task_name = None + self._task_id = None + + def get_selected_project_name(self): + return self._project_name + + def set_selected_project(self, project_name): + if project_name == self._project_name: + return + + self._project_name = project_name + self._controller.emit_event( + "selection.project.changed", + {"project_name": project_name}, + self.event_source + ) + + def get_selected_folder_id(self): + return self._folder_id + + def set_selected_folder(self, folder_id): + if folder_id == self._folder_id: + return + + self._folder_id = folder_id + self._controller.emit_event( + "selection.folder.changed", + { + "project_name": self._project_name, + "folder_id": folder_id, + }, + self.event_source + ) + + def get_selected_task_name(self): + return self._task_name + + def get_selected_task_id(self): + return self._task_id + + def set_selected_task(self, task_id, task_name): + if task_id == self._task_id: + return + + self._task_name = task_name + self._task_id = task_id + self._controller.emit_event( + "selection.task.changed", + { + "project_name": self._project_name, + "folder_id": self._folder_id, + "task_name": task_name, + "task_id": task_id, + }, + self.event_source + ) + + +class CreateHierarchyController: + """Controller for hierarchy widgets. + + Helper for handling hierarchy widgets in create tab. It handles selection + of folder and task to properly propagate it to other widgets. + + At the same time handles expected selection so can pre-select folder and + task based on current context. + + Args: + controller (PublisherController): Publisher controller. 
+ + """ + def __init__(self, controller): + self._event_system = QueuedEventSystem() + self._controller = controller + self._selection_model = CreateSelectionModel(self) + self._expected_selection = HierarchyExpectedSelection( + self, handle_project=False + ) + + # Events system + @property + def event_system(self): + return self._event_system + + def emit_event(self, topic, data=None, source=None): + """Use implemented event system to trigger event.""" + + if data is None: + data = {} + self.event_system.emit(topic, data, source) + + def register_event_callback(self, topic, callback): + self.event_system.add_callback(topic, callback) + + def get_project_name(self): + return self._controller.project_name + + def get_folder_items(self, project_name, sender=None): + return self._controller.get_folder_items(project_name, sender) + + def get_task_items(self, project_name, folder_id, sender=None): + return self._controller.get_task_items( + project_name, folder_id, sender + ) + + # Selection model + def set_selected_project(self, project_name): + self._selection_model.set_selected_project(project_name) + + def set_selected_folder(self, folder_id): + self._selection_model.set_selected_folder(folder_id) + + def set_selected_task(self, task_id, task_name): + self._selection_model.set_selected_task(task_id, task_name) + + # Expected selection + def get_expected_selection_data(self): + return self._expected_selection.get_expected_selection_data() + + def set_expected_selection(self, project_name, folder_id, task_name): + self._expected_selection.set_expected_selection( + project_name, folder_id, task_name + ) + + def expected_folder_selected(self, folder_id): + self._expected_selection.expected_folder_selected(folder_id) + + def expected_task_selected(self, folder_id, task_name): + self._expected_selection.expected_task_selected(folder_id, task_name) + + +class CreateContextWidget(QtWidgets.QWidget): + folder_changed = QtCore.Signal() + task_changed = QtCore.Signal() + + def __init__(self, controller, parent): + super(CreateContextWidget, self).__init__(parent) + + self._controller = controller + self._enabled = True + self._last_project_name = None + self._last_folder_id = None + self._last_selected_task_name = None + + headers_widget = QtWidgets.QWidget(self) + + folder_filter_input = PlaceholderLineEdit(headers_widget) + folder_filter_input.setPlaceholderText("Filter folders..") + + current_context_btn = GoToCurrentButton(headers_widget) + current_context_btn.setToolTip("Go to current context") + current_context_btn.setVisible(False) + + headers_layout = QtWidgets.QHBoxLayout(headers_widget) + headers_layout.setContentsMargins(0, 0, 0, 0) + headers_layout.addWidget(folder_filter_input, 1) + headers_layout.addWidget(current_context_btn, 0) + + hierarchy_controller = CreateHierarchyController(controller) + + folders_widget = FoldersWidget( + hierarchy_controller, self, handle_expected_selection=True + ) + folders_widget.set_deselectable(True) + + tasks_widget = TasksWidget( + hierarchy_controller, self, handle_expected_selection=True + ) + + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.setContentsMargins(0, 0, 0, 0) + main_layout.setSpacing(0) + main_layout.addWidget(headers_widget, 0) + main_layout.addWidget(folders_widget, 2) + main_layout.addWidget(tasks_widget, 1) + + folders_widget.selection_changed.connect(self._on_folder_change) + tasks_widget.selection_changed.connect(self._on_task_change) + current_context_btn.clicked.connect(self._on_current_context_click) + 
folder_filter_input.textChanged.connect(self._on_folder_filter_change) + + self._folder_filter_input = folder_filter_input + self._current_context_btn = current_context_btn + self._folders_widget = folders_widget + self._tasks_widget = tasks_widget + self._hierarchy_controller = hierarchy_controller + + def get_selected_folder_id(self): + return self._folders_widget.get_selected_folder_id() + + def get_selected_folder_path(self): + return self._folders_widget.get_selected_folder_path() + + def get_selected_task_name(self): + return self._tasks_widget.get_selected_task_name() + + def get_selected_task_type(self): + return self._tasks_widget.get_selected_task_type() + + def update_current_context_btn(self): + # Hide set current folder if there is no one + folder_path = self._controller.current_folder_path + self._current_context_btn.setVisible(bool(folder_path)) + + def set_selected_context(self, folder_id, task_name): + self._hierarchy_controller.set_expected_selection( + self._controller.project_name, + folder_id, + task_name + ) + + def is_enabled(self): + return self._enabled + + def set_enabled(self, enabled): + if enabled is self._enabled: + return + + self.setEnabled(enabled) + self._enabled = enabled + + if not enabled: + self._last_folder_id = self.get_selected_folder_id() + self._folders_widget.set_selected_folder(None) + last_selected_task_name = self.get_selected_task_name() + if last_selected_task_name: + self._last_selected_task_name = last_selected_task_name + self._clear_selection() + + elif self._last_selected_task_name is not None: + self._hierarchy_controller.set_expected_selection( + self._last_project_name, + self._last_folder_id, + self._last_selected_task_name + ) + + def refresh(self): + self._last_project_name = self._controller.project_name + folder_id = self._last_folder_id + task_name = self._last_selected_task_name + if folder_id is None: + folder_path = self._controller.current_folder_path + folder_id = self._controller.get_folder_id_from_path(folder_path) + task_name = self._controller.current_task_name + self._hierarchy_controller.set_selected_project( + self._last_project_name + ) + self._folders_widget.set_project_name(self._last_project_name) + self._hierarchy_controller.set_expected_selection( + self._last_project_name, folder_id, task_name + ) + + def _clear_selection(self): + self._folders_widget.set_selected_folder(None) + + def _on_folder_change(self): + self.folder_changed.emit() + + def _on_task_change(self): + self.task_changed.emit() + + def _on_current_context_click(self): + folder_path = self._controller.current_folder_path + task_name = self._controller.current_task_name + folder_id = self._controller.get_folder_id_from_path(folder_path) + self._hierarchy_controller.set_expected_selection( + self._last_project_name, folder_id, task_name + ) + + def _on_folder_filter_change(self, text): + self._folders_widget.set_name_filter(text) diff --git a/client/ayon_core/tools/publisher/widgets/create_widget.py b/client/ayon_core/tools/publisher/widgets/create_widget.py index 12135c6891..2e4ca34138 100644 --- a/client/ayon_core/tools/publisher/widgets/create_widget.py +++ b/client/ayon_core/tools/publisher/widgets/create_widget.py @@ -3,7 +3,7 @@ import re from qtpy import QtWidgets, QtCore, QtGui from ayon_core.pipeline.create import ( - SUBSET_NAME_ALLOWED_SYMBOLS, + PRODUCT_NAME_ALLOWED_SYMBOLS, PRE_CREATE_THUMBNAIL_KEY, DEFAULT_VARIANT_VALUE, TaskNotSetError, @@ -14,12 +14,11 @@ from .widgets import ( IconValuePixmapLabel, CreateBtn, ) -from 
.assets_widget import CreateWidgetAssetsWidget -from .tasks_widget import CreateWidgetTasksWidget +from .create_context_widgets import CreateContextWidget from .precreate_widget import PreCreateWidget from ..constants import ( VARIANT_TOOLTIP, - FAMILY_ROLE, + PRODUCT_TYPE_ROLE, CREATOR_IDENTIFIER_ROLE, CREATOR_THUMBNAIL_ENABLED_ROLE, CREATOR_SORT_ROLE, @@ -45,13 +44,13 @@ class CreatorShortDescWidget(QtWidgets.QWidget): # --- Short description widget --- icon_widget = IconValuePixmapLabel(None, self) - icon_widget.setObjectName("FamilyIconLabel") + icon_widget.setObjectName("ProductTypeIconLabel") # --- Short description inputs --- short_desc_input_widget = QtWidgets.QWidget(self) - family_label = QtWidgets.QLabel(short_desc_input_widget) - family_label.setAlignment( + product_type_label = QtWidgets.QLabel(short_desc_input_widget) + product_type_label.setAlignment( QtCore.Qt.AlignBottom | QtCore.Qt.AlignLeft ) @@ -64,7 +63,7 @@ class CreatorShortDescWidget(QtWidgets.QWidget): short_desc_input_widget ) short_desc_input_layout.setSpacing(0) - short_desc_input_layout.addWidget(family_label) + short_desc_input_layout.addWidget(product_type_label) short_desc_input_layout.addWidget(description_label) # -------------------------------- @@ -75,13 +74,13 @@ class CreatorShortDescWidget(QtWidgets.QWidget): # -------------------------------- self._icon_widget = icon_widget - self._family_label = family_label + self._product_type_label = product_type_label self._description_label = description_label def set_creator_item(self, creator_item=None): if not creator_item: self._icon_widget.set_icon_def(None) - self._family_label.setText("") + self._product_type_label.setText("") self._description_label.setText("") return @@ -89,8 +88,8 @@ class CreatorShortDescWidget(QtWidgets.QWidget): description = creator_item.description or "" self._icon_widget.set_icon_def(plugin_icon) - self._family_label.setText("{}".format(creator_item.family)) - self._family_label.setTextInteractionFlags(QtCore.Qt.NoTextInteraction) + self._product_type_label.setText("{}".format(creator_item.product_type)) + self._product_type_label.setTextInteractionFlags(QtCore.Qt.NoTextInteraction) self._description_label.setText(description) @@ -109,28 +108,19 @@ class CreateWidget(QtWidgets.QWidget): self._controller = controller - self._asset_name = None - self._subset_names = None + self._folder_path = None + self._product_names = None self._selected_creator = None self._prereq_available = False - name_pattern = "^[{}]*$".format(SUBSET_NAME_ALLOWED_SYMBOLS) + name_pattern = "^[{}]*$".format(PRODUCT_NAME_ALLOWED_SYMBOLS) self._name_pattern = name_pattern self._compiled_name_pattern = re.compile(name_pattern) main_splitter_widget = QtWidgets.QSplitter(self) - context_widget = QtWidgets.QWidget(main_splitter_widget) - - assets_widget = CreateWidgetAssetsWidget(controller, context_widget) - tasks_widget = CreateWidgetTasksWidget(controller, context_widget) - - context_layout = QtWidgets.QVBoxLayout(context_widget) - context_layout.setContentsMargins(0, 0, 0, 0) - context_layout.setSpacing(0) - context_layout.addWidget(assets_widget, 2) - context_layout.addWidget(tasks_widget, 1) + context_widget = CreateContextWidget(controller, main_splitter_widget) # --- Creators view --- creators_widget = QtWidgets.QWidget(main_splitter_widget) @@ -164,19 +154,19 @@ class CreateWidget(QtWidgets.QWidget): # --- Creator attr defs --- creators_attrs_widget = QtWidgets.QWidget(creators_splitter) - # Top part - variant / subset name + thumbnail + # Top part - 
variant / product name + thumbnail creators_attrs_top = QtWidgets.QWidget(creators_attrs_widget) - # Basics - variant / subset name + # Basics - variant / product name creator_basics_widget = ResizeControlWidget(creators_attrs_top) - variant_subset_label = QtWidgets.QLabel( + product_variant_label = QtWidgets.QLabel( "Create options", creator_basics_widget ) - variant_subset_widget = QtWidgets.QWidget(creator_basics_widget) - # Variant and subset input - variant_widget = ResizeControlWidget(variant_subset_widget) + product_variant_widget = QtWidgets.QWidget(creator_basics_widget) + # Variant and product input + variant_widget = ResizeControlWidget(product_variant_widget) variant_widget.setObjectName("VariantInputsWidget") variant_input = QtWidgets.QLineEdit(variant_widget) @@ -196,20 +186,20 @@ class CreateWidget(QtWidgets.QWidget): variant_layout.addWidget(variant_input, 1) variant_layout.addWidget(variant_hints_btn, 0, QtCore.Qt.AlignVCenter) - subset_name_input = QtWidgets.QLineEdit(variant_subset_widget) - subset_name_input.setEnabled(False) + product_name_input = QtWidgets.QLineEdit(product_variant_widget) + product_name_input.setEnabled(False) - variant_subset_layout = QtWidgets.QFormLayout(variant_subset_widget) - variant_subset_layout.setContentsMargins(0, 0, 0, 0) - variant_subset_layout.setHorizontalSpacing(INPUTS_LAYOUT_HSPACING) - variant_subset_layout.setVerticalSpacing(INPUTS_LAYOUT_VSPACING) - variant_subset_layout.addRow("Variant", variant_widget) - variant_subset_layout.addRow("Product", subset_name_input) + product_variant_layout = QtWidgets.QFormLayout(product_variant_widget) + product_variant_layout.setContentsMargins(0, 0, 0, 0) + product_variant_layout.setHorizontalSpacing(INPUTS_LAYOUT_HSPACING) + product_variant_layout.setVerticalSpacing(INPUTS_LAYOUT_VSPACING) + product_variant_layout.addRow("Variant", variant_widget) + product_variant_layout.addRow("Product", product_name_input) creator_basics_layout = QtWidgets.QVBoxLayout(creator_basics_widget) creator_basics_layout.setContentsMargins(0, 0, 0, 0) - creator_basics_layout.addWidget(variant_subset_label, 0) - creator_basics_layout.addWidget(variant_subset_widget, 0) + creator_basics_layout.addWidget(product_variant_label, 0) + creator_basics_layout.addWidget(product_variant_widget, 0) thumbnail_widget = ThumbnailWidget(controller, creators_attrs_top) @@ -279,11 +269,8 @@ class CreateWidget(QtWidgets.QWidget): ) variant_hints_btn.clicked.connect(self._on_variant_btn_click) variant_hints_menu.triggered.connect(self._on_variant_action) - assets_widget.selection_changed.connect(self._on_asset_change) - assets_widget.current_context_required.connect( - self._on_current_session_context_request - ) - tasks_widget.task_changed.connect(self._on_task_change) + context_widget.folder_changed.connect(self._on_folder_change) + context_widget.task_changed.connect(self._on_task_change) thumbnail_widget.thumbnail_created.connect(self._on_thumbnail_create) thumbnail_widget.thumbnail_cleared.connect(self._on_thumbnail_clear) @@ -299,10 +286,8 @@ class CreateWidget(QtWidgets.QWidget): self._creators_splitter = creators_splitter self._context_widget = context_widget - self._assets_widget = assets_widget - self._tasks_widget = tasks_widget - self.subset_name_input = subset_name_input + self.product_name_input = product_name_input self.variant_input = variant_input self.variant_hints_btn = variant_hints_btn @@ -324,47 +309,51 @@ class CreateWidget(QtWidgets.QWidget): self._first_show = True self._last_thumbnail_path = None - 
self._last_current_context_asset = None + self._last_current_context_folder_path = None self._last_current_context_task = None self._use_current_context = True @property - def current_asset_name(self): - return self._controller.current_asset_name + def current_folder_path(self): + return self._controller.current_folder_path @property def current_task_name(self): return self._controller.current_task_name def _context_change_is_enabled(self): - return self._context_widget.isEnabled() + return self._context_widget.is_enabled() - def _get_asset_name(self): - asset_name = None + def _get_folder_path(self): + folder_path = None if self._context_change_is_enabled(): - asset_name = self._assets_widget.get_selected_asset_name() + folder_path = self._context_widget.get_selected_folder_path() - if asset_name is None: - asset_name = self.current_asset_name - return asset_name or None + if folder_path is None: + folder_path = self.current_folder_path + return folder_path or None + + def _get_folder_id(self): + folder_id = None + if self._context_widget.is_enabled(): + folder_id = self._context_widget.get_selected_folder_id() + return folder_id def _get_task_name(self): task_name = None if self._context_change_is_enabled(): - # Don't use selection of task if asset is not set - asset_name = self._assets_widget.get_selected_asset_name() - if asset_name: - task_name = self._tasks_widget.get_selected_task_name() + # Don't use selection of task if folder is not set + folder_path = self._context_widget.get_selected_folder_path() + if folder_path: + task_name = self._context_widget.get_selected_task_name() if not task_name: task_name = self.current_task_name return task_name def _set_context_enabled(self, enabled): - self._assets_widget.set_enabled(enabled) - self._tasks_widget.set_enabled(enabled) - check_prereq = self._context_widget.isEnabled() != enabled - self._context_widget.setEnabled(enabled) + check_prereq = self._context_widget.is_enabled() != enabled + self._context_widget.set_enabled(enabled) if check_prereq: self._invalidate_prereq() @@ -375,12 +364,12 @@ class CreateWidget(QtWidgets.QWidget): self._use_current_context = True def refresh(self): - current_asset_name = self._controller.current_asset_name + current_folder_path = self._controller.current_folder_path current_task_name = self._controller.current_task_name - # Get context before refresh to keep selection of asset and + # Get context before refresh to keep selection of folder and # task widgets - asset_name = self._get_asset_name() + folder_path = self._get_folder_path() task_name = self._get_task_name() # Replace by current context if last loaded context was @@ -388,37 +377,36 @@ class CreateWidget(QtWidgets.QWidget): if ( self._use_current_context or ( - self._last_current_context_asset - and asset_name == self._last_current_context_asset + self._last_current_context_folder_path + and folder_path == self._last_current_context_folder_path and task_name == self._last_current_context_task ) ): - asset_name = current_asset_name + folder_path = current_folder_path task_name = current_task_name # Store values for future refresh - self._last_current_context_asset = current_asset_name + self._last_current_context_folder_path = current_folder_path self._last_current_context_task = current_task_name self._use_current_context = False self._prereq_available = False - # Disable context widget so refresh of asset will use context asset - # name + # Disable context widget so refresh of folder will use context folder + # path 
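        # NOTE (illustrative only): the context selection below is id based.
        # A hypothetical folder path such as "/assets/hero" would be resolved
        # through the controller, e.g.
        #
        #     folder_id = self._controller.get_folder_id_from_path("/assets/hero")
        #     self._context_widget.set_selected_context(folder_id, task_name)
        #
        # get_folder_id_from_path returns None when the path is empty or not
        # found in the hierarchy model.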
self._set_context_enabled(False) - self._assets_widget.refresh() - # Refresh data before update of creators - self._refresh_asset() + self._context_widget.refresh() + self._refresh_product_name() + # Then refresh creators which may trigger callbacks using refreshed # data self._refresh_creators() - self._assets_widget.update_current_asset() - self._assets_widget.select_asset_by_name(asset_name) - self._tasks_widget.set_asset_name(asset_name) - self._tasks_widget.select_task_name(task_name) + folder_id = self._controller.get_folder_id_from_path(folder_path) + self._context_widget.update_current_context_btn() + self._context_widget.set_selected_context(folder_id, task_name) self._invalidate_prereq_deffered() @@ -439,9 +427,9 @@ class CreateWidget(QtWidgets.QWidget): if ( self._context_change_is_enabled() - and self._get_asset_name() is None + and self._get_folder_path() is None ): - # QUESTION how to handle invalid asset? + # QUESTION how to handle invalid folder? prereq_available = False creator_btn_tooltips.append("Context is not selected") @@ -460,27 +448,29 @@ class CreateWidget(QtWidgets.QWidget): self._on_variant_change() - def _refresh_asset(self): - asset_name = self._get_asset_name() + def _refresh_product_name(self): + folder_path = self._get_folder_path() - # Skip if asset did not change - if self._asset_name and self._asset_name == asset_name: + # Skip if folder did not change + if self._folder_path and self._folder_path == folder_path: return - # Make sure `_asset_name` and `_subset_names` variables are reset - self._asset_name = asset_name - self._subset_names = None - if asset_name is None: + # Make sure `_folder_path` and `_product_names` variables are reset + self._folder_path = folder_path + self._product_names = None + if folder_path is None: return - subset_names = self._controller.get_existing_subset_names(asset_name) + product_names = self._controller.get_existing_product_names( + folder_path + ) - self._subset_names = subset_names - if subset_names is None: - self.subset_name_input.setText("< Asset is not set >") + self._product_names = product_names + if product_names is None: + self.product_name_input.setText("< Folder is not set >") def _refresh_creators(self): - # Refresh creators and add their families to list + # Refresh creators and add their product types to list existing_items = {} old_creators = set() for row in range(self._creators_model.rowCount()): @@ -489,7 +479,7 @@ class CreateWidget(QtWidgets.QWidget): existing_items[identifier] = item old_creators.add(identifier) - # Add new families + # Add new create plugins new_creators = set() creator_items_by_identifier = self._controller.creator_items for identifier, creator_item in creator_items_by_identifier.items(): @@ -515,11 +505,11 @@ class CreateWidget(QtWidgets.QWidget): creator_item.create_allow_thumbnail, CREATOR_THUMBNAIL_ENABLED_ROLE ) - item.setData(creator_item.family, FAMILY_ROLE) + item.setData(creator_item.product_type, PRODUCT_TYPE_ROLE) if is_new: self._creators_model.appendRow(item) - # Remove families that are no more available + # Remove create plugins that are no more available for identifier in (old_creators - new_creators): item = existing_items[identifier] self._creators_model.takeRow(item.row()) @@ -545,11 +535,8 @@ class CreateWidget(QtWidgets.QWidget): # Trigger refresh only if is visible self.refresh() - def _on_asset_change(self): - self._refresh_asset() - - asset_name = self._assets_widget.get_selected_asset_name() - self._tasks_widget.set_asset_name(asset_name) + def 
_on_folder_change(self): + self._refresh_product_name() if self._context_change_is_enabled(): self._invalidate_prereq_deffered() @@ -564,12 +551,6 @@ class CreateWidget(QtWidgets.QWidget): def _on_thumbnail_clear(self): self._last_thumbnail_path = None - def _on_current_session_context_request(self): - self._assets_widget.set_current_session_asset() - task_name = self.current_task_name - if task_name: - self._tasks_widget.select_task_name(task_name) - def _on_creator_item_change(self, new_index, _old_index): identifier = None if new_index.isValid(): @@ -616,7 +597,7 @@ class CreateWidget(QtWidgets.QWidget): != self._context_change_is_enabled() ): self._set_context_enabled(creator_item.create_allow_context_change) - self._refresh_asset() + self._refresh_product_name() self._thumbnail_widget.setVisible( creator_item.create_allow_thumbnail @@ -641,7 +622,7 @@ class CreateWidget(QtWidgets.QWidget): self.variant_hints_menu.addAction(variant) variant_text = default_variant or DEFAULT_VARIANT_VALUE - # Make sure subset name is updated to new plugin + # Make sure product name is updated to new plugin if variant_text == self.variant_input.text(): self._on_variant_change() else: @@ -666,8 +647,8 @@ class CreateWidget(QtWidgets.QWidget): # This should probably never happen? if not self._selected_creator: - if self.subset_name_input.text(): - self.subset_name_input.setText("") + if self.product_name_input.text(): + self.product_name_input.setText("") return if variant_value is None: @@ -676,52 +657,52 @@ class CreateWidget(QtWidgets.QWidget): if not self._compiled_name_pattern.match(variant_value): self._create_btn.setEnabled(False) self._set_variant_state_property("invalid") - self.subset_name_input.setText("< Invalid variant >") + self.product_name_input.setText("< Invalid variant >") return if not self._context_change_is_enabled(): self._create_btn.setEnabled(True) self._set_variant_state_property("") - self.subset_name_input.setText("< Valid variant >") + self.product_name_input.setText("< Valid variant >") return - asset_name = self._get_asset_name() + folder_path = self._get_folder_path() task_name = self._get_task_name() creator_idenfier = self._selected_creator.identifier - # Calculate subset name with Creator plugin + # Calculate product name with Creator plugin try: - subset_name = self._controller.get_subset_name( - creator_idenfier, variant_value, task_name, asset_name + product_name = self._controller.get_product_name( + creator_idenfier, variant_value, task_name, folder_path ) except TaskNotSetError: self._create_btn.setEnabled(False) self._set_variant_state_property("invalid") - self.subset_name_input.setText("< Missing task >") + self.product_name_input.setText("< Missing task >") return - self.subset_name_input.setText(subset_name) + self.product_name_input.setText(product_name) self._create_btn.setEnabled(True) - self._validate_subset_name(subset_name, variant_value) + self._validate_product_name(product_name, variant_value) - def _validate_subset_name(self, subset_name, variant_value): - # Get all subsets of the current asset - if self._subset_names: - existing_subset_names = set(self._subset_names) + def _validate_product_name(self, product_name, variant_value): + # Get all products of the current folder + if self._product_names: + existing_product_names = set(self._product_names) else: - existing_subset_names = set() - existing_subset_names_low = set( + existing_product_names = set() + existing_product_names_low = set( _name.lower() - for _name in existing_subset_names + for _name 
in existing_product_names ) # Replace compare_regex = re.compile(re.sub( - variant_value, "(.+)", subset_name, flags=re.IGNORECASE + variant_value, "(.+)", product_name, flags=re.IGNORECASE )) variant_hints = set() if variant_value: - for _name in existing_subset_names: + for _name in existing_product_names: _result = compare_regex.search(_name) if _result: variant_hints |= set(_result.groups()) @@ -741,12 +722,12 @@ class CreateWidget(QtWidgets.QWidget): action = self.variant_hints_menu.addAction(variant_hint) self.variant_hints_group.addAction(action) - # Indicate subset existence + # Indicate product existence if not variant_value: property_value = "empty" - elif subset_name.lower() in existing_subset_names_low: - # validate existence of subset name with lowered text + elif product_name.lower() in existing_product_names_low: + # validate existence of product name with lowered text # - "renderMain" vs. "rendermain" mean same path item for # windows property_value = "exists" @@ -794,15 +775,15 @@ class CreateWidget(QtWidgets.QWidget): index = indexes[0] creator_identifier = index.data(CREATOR_IDENTIFIER_ROLE) - family = index.data(FAMILY_ROLE) + product_type = index.data(PRODUCT_TYPE_ROLE) variant = self.variant_input.text() - # Care about subset name only if context change is enabled - subset_name = None - asset_name = None + # Care about product name only if context change is enabled + product_name = None + folder_path = None task_name = None if self._context_change_is_enabled(): - subset_name = self.subset_name_input.text() - asset_name = self._get_asset_name() + product_name = self.product_name_input.text() + folder_path = self._get_folder_path() task_name = self._get_task_name() pre_create_data = self._pre_create_widget.current_value() @@ -814,15 +795,15 @@ class CreateWidget(QtWidgets.QWidget): # Where to define these data? # - what data show be stored? 
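        # NOTE (illustrative only, hypothetical values): the payload built
        # below ends up looking roughly like
        #
        #     instance_data = {
        #         "folderPath": "/assets/hero",
        #         "task": "modeling",
        #         "variant": "Main",
        #         "productType": "model",
        #     }
        #
        # folderPath and task stay None when the selected creator does not
        # allow changing the context.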
instance_data = { - "folderPath": asset_name, + "folderPath": folder_path, "task": task_name, "variant": variant, - "family": family + "productType": product_type } success = self._controller.create( creator_identifier, - subset_name, + product_name, instance_data, pre_create_data ) diff --git a/client/ayon_core/tools/publisher/widgets/folders_dialog.py b/client/ayon_core/tools/publisher/widgets/folders_dialog.py new file mode 100644 index 0000000000..8f93264b2e --- /dev/null +++ b/client/ayon_core/tools/publisher/widgets/folders_dialog.py @@ -0,0 +1,151 @@ +from qtpy import QtWidgets, QtCore, QtGui + +from ayon_core.lib.events import QueuedEventSystem +from ayon_core.tools.ayon_utils.widgets import FoldersWidget +from ayon_core.tools.utils import PlaceholderLineEdit + + +class FoldersDialogController: + def __init__(self, controller): + self._event_system = QueuedEventSystem() + self._controller = controller + + @property + def event_system(self): + return self._event_system + + def emit_event(self, topic, data=None, source=None): + """Use implemented event system to trigger event.""" + + if data is None: + data = {} + self.event_system.emit(topic, data, source) + + def register_event_callback(self, topic, callback): + self.event_system.add_callback(topic, callback) + + def get_folder_items(self, project_name, sender=None): + return self._controller.get_folder_items(project_name, sender) + + def set_selected_folder(self, folder_id): + pass + + +class FoldersDialog(QtWidgets.QDialog): + """Dialog to select folder for a context of instance.""" + + def __init__(self, controller, parent): + super(FoldersDialog, self).__init__(parent) + self.setWindowTitle("Select folder") + + filter_input = PlaceholderLineEdit(self) + filter_input.setPlaceholderText("Filter folders..") + + folders_controller = FoldersDialogController(controller) + folders_widget = FoldersWidget(folders_controller, self) + folders_widget.set_deselectable(True) + + ok_btn = QtWidgets.QPushButton("OK", self) + cancel_btn = QtWidgets.QPushButton("Cancel", self) + + btns_layout = QtWidgets.QHBoxLayout() + btns_layout.addStretch(1) + btns_layout.addWidget(ok_btn) + btns_layout.addWidget(cancel_btn) + + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(filter_input, 0) + layout.addWidget(folders_widget, 1) + layout.addLayout(btns_layout, 0) + + controller.event_system.add_callback( + "controller.reset.finished", self._on_controller_reset + ) + + folders_widget.double_clicked.connect(self._on_ok_clicked) + filter_input.textChanged.connect(self._on_filter_change) + ok_btn.clicked.connect(self._on_ok_clicked) + cancel_btn.clicked.connect(self._on_cancel_clicked) + + self._controller = controller + self._filter_input = filter_input + self._ok_btn = ok_btn + self._cancel_btn = cancel_btn + + self._folders_widget = folders_widget + + self._selected_folder_path = None + # Soft refresh is enabled + # - reset will happen at all cost if soft reset is enabled + # - adds ability to call reset on multiple places without repeating + self._soft_reset_enabled = True + + self._first_show = True + self._default_height = 500 + + def _on_first_show(self): + center = self.rect().center() + size = self.size() + size.setHeight(self._default_height) + + self.resize(size) + new_pos = self.mapToGlobal(center) + new_pos.setX(new_pos.x() - int(self.width() / 2)) + new_pos.setY(new_pos.y() - int(self.height() / 2)) + self.move(new_pos) + + def _on_controller_reset(self): + # Change reset enabled so model is reset on show event + self._soft_reset_enabled = 
True + + def showEvent(self, event): + """Refresh folders widget on show.""" + super(FoldersDialog, self).showEvent(event) + if self._first_show: + self._first_show = False + self._on_first_show() + # Refresh on show + self.reset(False) + + def reset(self, force=True): + """Reset widget.""" + if not force and not self._soft_reset_enabled: + return + + if self._soft_reset_enabled: + self._soft_reset_enabled = False + + self._folders_widget.set_project_name(self._controller.project_name) + + def _on_filter_change(self, text): + """Trigger change of filter of folders.""" + self._folders_widget.set_name_filter(text) + + def _on_cancel_clicked(self): + self.done(0) + + def _on_ok_clicked(self): + self._selected_folder_path = ( + self._folders_widget.get_selected_folder_path() + ) + self.done(1) + + def set_selected_folders(self, folder_paths): + """Change preselected folder before showing the dialog. + + This also resets model and clean filter. + """ + self.reset(False) + self._filter_input.setText("") + + folder_id = None + for folder_path in folder_paths: + folder_id = self._controller.get_folder_id_from_path(folder_path) + if folder_id: + break + if folder_id: + self._folders_widget.set_selected_folder(folder_id) + + def get_selected_folder_path(self): + """Get selected folder path.""" + return self._selected_folder_path diff --git a/client/ayon_core/tools/publisher/widgets/list_view_widgets.py b/client/ayon_core/tools/publisher/widgets/list_view_widgets.py index fc76c47334..3322a73be6 100644 --- a/client/ayon_core/tools/publisher/widgets/list_view_widgets.py +++ b/client/ayon_core/tools/publisher/widgets/list_view_widgets.py @@ -123,8 +123,8 @@ class InstanceListItemWidget(QtWidgets.QWidget): instance_label = html_escape(instance_label) - subset_name_label = QtWidgets.QLabel(instance_label, self) - subset_name_label.setObjectName("ListViewSubsetName") + product_name_label = QtWidgets.QLabel(instance_label, self) + product_name_label.setObjectName("ListViewProductName") active_checkbox = NiceCheckbox(parent=self) active_checkbox.setChecked(instance["active"]) @@ -132,17 +132,17 @@ class InstanceListItemWidget(QtWidgets.QWidget): layout = QtWidgets.QHBoxLayout(self) content_margins = layout.contentsMargins() layout.setContentsMargins(content_margins.left() + 2, 0, 2, 0) - layout.addWidget(subset_name_label) + layout.addWidget(product_name_label) layout.addStretch(1) layout.addWidget(active_checkbox) self.setAttribute(QtCore.Qt.WA_TranslucentBackground) - subset_name_label.setAttribute(QtCore.Qt.WA_TranslucentBackground) + product_name_label.setAttribute(QtCore.Qt.WA_TranslucentBackground) active_checkbox.setAttribute(QtCore.Qt.WA_TranslucentBackground) active_checkbox.stateChanged.connect(self._on_active_change) - self._instance_label_widget = subset_name_label + self._instance_label_widget = product_name_label self._active_checkbox = active_checkbox self._has_valid_context = None @@ -185,7 +185,7 @@ class InstanceListItemWidget(QtWidgets.QWidget): def update_instance_values(self): """Update instance data propagated to widgets.""" - # Check subset name + # Check product name label = self.instance.label if label != self._instance_label_widget.text(): self._instance_label_widget.setText(html_escape(label)) @@ -631,8 +631,8 @@ class InstanceListView(AbstractInstanceView): # Create new item and store it as new item = QtGui.QStandardItem() - item.setData(instance["subset"], SORT_VALUE_ROLE) - item.setData(instance["subset"], GROUP_ROLE) + item.setData(instance["productName"], SORT_VALUE_ROLE) + 
item.setData(instance["productName"], GROUP_ROLE) item.setData(instance_id, INSTANCE_ID_ROLE) new_items.append(item) new_items_with_instance.append((item, instance)) diff --git a/client/ayon_core/tools/publisher/widgets/overview_widget.py b/client/ayon_core/tools/publisher/widgets/overview_widget.py index f1b271850a..dd82185830 100644 --- a/client/ayon_core/tools/publisher/widgets/overview_widget.py +++ b/client/ayon_core/tools/publisher/widgets/overview_widget.py @@ -5,7 +5,7 @@ from .border_label_widget import BorderedLabelWidget from .card_view_widgets import InstanceCardView from .list_view_widgets import InstanceListView from .widgets import ( - SubsetAttributesWidget, + ProductAttributesWidget, CreateInstanceBtn, RemoveInstanceBtn, ChangeViewBtn, @@ -28,72 +28,72 @@ class OverviewWidget(QtWidgets.QFrame): self._refreshing_instances = False self._controller = controller - subset_content_widget = QtWidgets.QWidget(self) + product_content_widget = QtWidgets.QWidget(self) - create_widget = CreateWidget(controller, subset_content_widget) + create_widget = CreateWidget(controller, product_content_widget) - # --- Created Subsets/Instances --- + # --- Created Products/Instances --- # Common widget for creation and overview - subset_views_widget = BorderedLabelWidget( + product_views_widget = BorderedLabelWidget( "Products to publish", - subset_content_widget + product_content_widget ) - subset_view_cards = InstanceCardView(controller, subset_views_widget) - subset_list_view = InstanceListView(controller, subset_views_widget) + product_view_cards = InstanceCardView(controller, product_views_widget) + product_list_view = InstanceListView(controller, product_views_widget) - subset_views_layout = QtWidgets.QStackedLayout() - subset_views_layout.addWidget(subset_view_cards) - subset_views_layout.addWidget(subset_list_view) - subset_views_layout.setCurrentWidget(subset_view_cards) + product_views_layout = QtWidgets.QStackedLayout() + product_views_layout.addWidget(product_view_cards) + product_views_layout.addWidget(product_list_view) + product_views_layout.setCurrentWidget(product_view_cards) - # Buttons at the bottom of subset view - create_btn = CreateInstanceBtn(subset_views_widget) - delete_btn = RemoveInstanceBtn(subset_views_widget) - change_view_btn = ChangeViewBtn(subset_views_widget) + # Buttons at the bottom of product view + create_btn = CreateInstanceBtn(product_views_widget) + delete_btn = RemoveInstanceBtn(product_views_widget) + change_view_btn = ChangeViewBtn(product_views_widget) # --- Overview --- - # Subset details widget - subset_attributes_wrap = BorderedLabelWidget( - "Publish options", subset_content_widget + # pProduct details widget + product_attributes_wrap = BorderedLabelWidget( + "Publish options", product_content_widget ) - subset_attributes_widget = SubsetAttributesWidget( - controller, subset_attributes_wrap + product_attributes_widget = ProductAttributesWidget( + controller, product_attributes_wrap ) - subset_attributes_wrap.set_center_widget(subset_attributes_widget) + product_attributes_wrap.set_center_widget(product_attributes_widget) - # Layout of buttons at the bottom of subset view - subset_view_btns_layout = QtWidgets.QHBoxLayout() - subset_view_btns_layout.setContentsMargins(0, 5, 0, 0) - subset_view_btns_layout.addWidget(create_btn) - subset_view_btns_layout.addSpacing(5) - subset_view_btns_layout.addWidget(delete_btn) - subset_view_btns_layout.addStretch(1) - subset_view_btns_layout.addWidget(change_view_btn) + # Layout of buttons at the bottom of product 
view + product_view_btns_layout = QtWidgets.QHBoxLayout() + product_view_btns_layout.setContentsMargins(0, 5, 0, 0) + product_view_btns_layout.addWidget(create_btn) + product_view_btns_layout.addSpacing(5) + product_view_btns_layout.addWidget(delete_btn) + product_view_btns_layout.addStretch(1) + product_view_btns_layout.addWidget(change_view_btn) # Layout of view and buttons - # - widget 'subset_view_widget' is necessary + # - widget 'product_view_widget' is necessary # - only layout won't be resized automatically to minimum size hint # on child resize request! - subset_view_widget = QtWidgets.QWidget(subset_views_widget) - subset_view_layout = QtWidgets.QVBoxLayout(subset_view_widget) - subset_view_layout.setContentsMargins(0, 0, 0, 0) - subset_view_layout.addLayout(subset_views_layout, 1) - subset_view_layout.addLayout(subset_view_btns_layout, 0) + product_view_widget = QtWidgets.QWidget(product_views_widget) + product_view_layout = QtWidgets.QVBoxLayout(product_view_widget) + product_view_layout.setContentsMargins(0, 0, 0, 0) + product_view_layout.addLayout(product_views_layout, 1) + product_view_layout.addLayout(product_view_btns_layout, 0) - subset_views_widget.set_center_widget(subset_view_widget) + product_views_widget.set_center_widget(product_view_widget) - # Whole subset layout with attributes and details - subset_content_layout = QtWidgets.QHBoxLayout(subset_content_widget) - subset_content_layout.setContentsMargins(0, 0, 0, 0) - subset_content_layout.addWidget(create_widget, 7) - subset_content_layout.addWidget(subset_views_widget, 3) - subset_content_layout.addWidget(subset_attributes_wrap, 7) + # Whole product layout with attributes and details + product_content_layout = QtWidgets.QHBoxLayout(product_content_widget) + product_content_layout.setContentsMargins(0, 0, 0, 0) + product_content_layout.addWidget(create_widget, 7) + product_content_layout.addWidget(product_views_widget, 3) + product_content_layout.addWidget(product_attributes_wrap, 7) - # Subset frame layout + # Product frame layout main_layout = QtWidgets.QVBoxLayout(self) main_layout.setContentsMargins(0, 0, 0, 0) - main_layout.addWidget(subset_content_widget, 1) + main_layout.addWidget(product_content_widget, 1) change_anim = QtCore.QVariantAnimation() change_anim.setStartValue(float(0)) @@ -101,7 +101,7 @@ class OverviewWidget(QtWidgets.QFrame): change_anim.setDuration(self.anim_duration) change_anim.setEasingCurve(QtCore.QEasingCurve.InOutQuad) - # --- Calbacks for instances/subsets view --- + # --- Calbacks for instances/products view --- create_btn.clicked.connect(self._on_create_clicked) delete_btn.clicked.connect(self._on_delete_clicked) change_view_btn.clicked.connect(self._on_change_view_clicked) @@ -110,24 +110,24 @@ class OverviewWidget(QtWidgets.QFrame): change_anim.finished.connect(self._on_change_anim_finished) # Selection changed - subset_list_view.selection_changed.connect( - self._on_subset_change + product_list_view.selection_changed.connect( + self._on_product_change ) - subset_view_cards.selection_changed.connect( - self._on_subset_change + product_view_cards.selection_changed.connect( + self._on_product_change ) # Active instances changed - subset_list_view.active_changed.connect( + product_list_view.active_changed.connect( self._on_active_changed ) - subset_view_cards.active_changed.connect( + product_view_cards.active_changed.connect( self._on_active_changed ) # Instance context has changed - subset_attributes_widget.instance_context_changed.connect( + 
product_attributes_widget.instance_context_changed.connect( self._on_instance_context_change ) - subset_attributes_widget.convert_requested.connect( + product_attributes_widget.convert_requested.connect( self._on_convert_requested ) @@ -145,26 +145,26 @@ class OverviewWidget(QtWidgets.QFrame): "instances.refresh.finished", self._on_instances_refresh ) - self._subset_content_widget = subset_content_widget - self._subset_content_layout = subset_content_layout + self._product_content_widget = product_content_widget + self._product_content_layout = product_content_layout - self._subset_view_cards = subset_view_cards - self._subset_list_view = subset_list_view - self._subset_views_layout = subset_views_layout + self._product_view_cards = product_view_cards + self._product_list_view = product_list_view + self._product_views_layout = product_views_layout self._create_btn = create_btn self._delete_btn = delete_btn - self._subset_attributes_widget = subset_attributes_widget + self._product_attributes_widget = product_attributes_widget self._create_widget = create_widget - self._subset_views_widget = subset_views_widget - self._subset_attributes_wrap = subset_attributes_wrap + self._product_views_widget = product_views_widget + self._product_attributes_wrap = product_attributes_wrap self._change_anim = change_anim # Start in create mode self._current_state = "create" - subset_attributes_wrap.setVisible(False) + product_attributes_wrap.setVisible(False) def make_sure_animation_is_finished(self): if self._change_anim.state() == QtCore.QAbstractAnimation.Running: @@ -193,18 +193,18 @@ class OverviewWidget(QtWidgets.QFrame): self._start_animation() def _start_animation(self): - views_geo = self._subset_views_widget.geometry() - layout_spacing = self._subset_content_layout.spacing() + views_geo = self._product_views_widget.geometry() + layout_spacing = self._product_content_layout.spacing() if self._create_widget.isVisible(): create_geo = self._create_widget.geometry() - subset_geo = QtCore.QRect(create_geo) - subset_geo.moveTop(views_geo.top()) - subset_geo.moveLeft(views_geo.right() + layout_spacing) - self._subset_attributes_wrap.setVisible(True) + product_geo = QtCore.QRect(create_geo) + product_geo.moveTop(views_geo.top()) + product_geo.moveLeft(views_geo.right() + layout_spacing) + self._product_attributes_wrap.setVisible(True) - elif self._subset_attributes_wrap.isVisible(): - subset_geo = self._subset_attributes_wrap.geometry() - create_geo = QtCore.QRect(subset_geo) + elif self._product_attributes_wrap.isVisible(): + product_geo = self._product_attributes_wrap.geometry() + create_geo = QtCore.QRect(product_geo) create_geo.moveTop(views_geo.top()) create_geo.moveRight(views_geo.left() - (layout_spacing + 1)) self._create_widget.setVisible(True) @@ -212,26 +212,26 @@ class OverviewWidget(QtWidgets.QFrame): self._change_anim.start() return - while self._subset_content_layout.count(): - self._subset_content_layout.takeAt(0) - self._subset_views_widget.setGeometry(views_geo) + while self._product_content_layout.count(): + self._product_content_layout.takeAt(0) + self._product_views_widget.setGeometry(views_geo) self._create_widget.setGeometry(create_geo) - self._subset_attributes_wrap.setGeometry(subset_geo) + self._product_attributes_wrap.setGeometry(product_geo) self._change_anim.start() - def get_subset_views_geo(self): - parent = self._subset_views_widget.parent() - global_pos = parent.mapToGlobal(self._subset_views_widget.pos()) + def get_product_views_geo(self): + parent = 
self._product_views_widget.parent() + global_pos = parent.mapToGlobal(self._product_views_widget.pos()) return QtCore.QRect( global_pos.x(), global_pos.y(), - self._subset_views_widget.width(), - self._subset_views_widget.height() + self._product_views_widget.width(), + self._product_views_widget.height() ) def has_items(self): - view = self._subset_views_layout.currentWidget() + view = self._product_views_layout.currentWidget() return view.has_items() def _on_create_clicked(self): @@ -272,7 +272,7 @@ class OverviewWidget(QtWidgets.QFrame): def _on_change_view_clicked(self): self._change_view_type() - def _on_subset_change(self, *_args): + def _on_product_change(self, *_args): # Ignore changes if in middle of refreshing if self._refreshing_instances: return @@ -289,7 +289,7 @@ class OverviewWidget(QtWidgets.QFrame): instances_by_id[instance_id] for instance_id in instance_ids ] - self._subset_attributes_widget.set_current_instances( + self._product_attributes_widget.set_current_instances( instances, context_selected, convertor_identifiers ) @@ -300,59 +300,59 @@ class OverviewWidget(QtWidgets.QFrame): def _on_change_anim(self, value): self._create_widget.setVisible(True) - self._subset_attributes_wrap.setVisible(True) - layout_spacing = self._subset_content_layout.spacing() + self._product_attributes_wrap.setVisible(True) + layout_spacing = self._product_content_layout.spacing() content_width = ( - self._subset_content_widget.width() - (layout_spacing * 2) + self._product_content_widget.width() - (layout_spacing * 2) ) - content_height = self._subset_content_widget.height() + content_height = self._product_content_widget.height() views_width = max( int(content_width * 0.3), - self._subset_views_widget.minimumWidth() + self._product_views_widget.minimumWidth() ) width = content_width - views_width # Visible widths of other widgets - subset_attrs_width = int((float(width) / self.anim_end_value) * value) - create_width = width - subset_attrs_width + product_attrs_width = int((float(width) / self.anim_end_value) * value) + create_width = width - product_attrs_width views_geo = QtCore.QRect( create_width + layout_spacing, 0, views_width, content_height ) create_geo = QtCore.QRect(0, 0, width, content_height) - subset_attrs_geo = QtCore.QRect(create_geo) + product_attrs_geo = QtCore.QRect(create_geo) create_geo.moveRight(views_geo.left() - (layout_spacing + 1)) - subset_attrs_geo.moveLeft(views_geo.right() + layout_spacing) + product_attrs_geo.moveLeft(views_geo.right() + layout_spacing) - self._subset_views_widget.setGeometry(views_geo) + self._product_views_widget.setGeometry(views_geo) self._create_widget.setGeometry(create_geo) - self._subset_attributes_wrap.setGeometry(subset_attrs_geo) + self._product_attributes_wrap.setGeometry(product_attrs_geo) def _on_change_anim_finished(self): self._change_visibility_for_state() - self._subset_content_layout.addWidget(self._create_widget, 7) - self._subset_content_layout.addWidget(self._subset_views_widget, 3) - self._subset_content_layout.addWidget(self._subset_attributes_wrap, 7) + self._product_content_layout.addWidget(self._create_widget, 7) + self._product_content_layout.addWidget(self._product_views_widget, 3) + self._product_content_layout.addWidget(self._product_attributes_wrap, 7) def _change_visibility_for_state(self): self._create_widget.setVisible( self._current_state == "create" ) - self._subset_attributes_wrap.setVisible( + self._product_attributes_wrap.setVisible( self._current_state == "publish" ) def 
_on_instance_context_change(self): - current_idx = self._subset_views_layout.currentIndex() - for idx in range(self._subset_views_layout.count()): + current_idx = self._product_views_layout.currentIndex() + for idx in range(self._product_views_layout.count()): if idx == current_idx: continue - widget = self._subset_views_layout.widget(idx) + widget = self._product_views_layout.widget(idx) if widget.refreshed: widget.set_refreshed(False) - current_widget = self._subset_views_layout.widget(current_idx) + current_widget = self._product_views_layout.widget(current_idx) current_widget.refresh_instance_states() self.instance_context_changed.emit() @@ -369,7 +369,7 @@ class OverviewWidget(QtWidgets.QFrame): convertor plugins. """ - view = self._subset_views_layout.currentWidget() + view = self._product_views_layout.currentWidget() return view.get_selected_items() def get_selected_legacy_convertors(self): @@ -384,11 +384,11 @@ class OverviewWidget(QtWidgets.QFrame): return convertor_identifiers def _change_view_type(self): - idx = self._subset_views_layout.currentIndex() - new_idx = (idx + 1) % self._subset_views_layout.count() + idx = self._product_views_layout.currentIndex() + new_idx = (idx + 1) % self._product_views_layout.count() - old_view = self._subset_views_layout.currentWidget() - new_view = self._subset_views_layout.widget(new_idx) + old_view = self._product_views_layout.currentWidget() + new_view = self._product_views_layout.widget(new_idx) if not new_view.refreshed: new_view.refresh() @@ -403,9 +403,9 @@ class OverviewWidget(QtWidgets.QFrame): instance_ids, context_selected, convertor_identifiers ) - self._subset_views_layout.setCurrentIndex(new_idx) + self._product_views_layout.setCurrentIndex(new_idx) - self._on_subset_change() + self._on_product_change() def _refresh_instances(self): if self._refreshing_instances: @@ -413,41 +413,41 @@ class OverviewWidget(QtWidgets.QFrame): self._refreshing_instances = True - for idx in range(self._subset_views_layout.count()): - widget = self._subset_views_layout.widget(idx) + for idx in range(self._product_views_layout.count()): + widget = self._product_views_layout.widget(idx) widget.set_refreshed(False) - view = self._subset_views_layout.currentWidget() + view = self._product_views_layout.currentWidget() view.refresh() view.set_refreshed(True) self._refreshing_instances = False # Force to change instance and refresh details - self._on_subset_change() + self._on_product_change() def _on_publish_start(self): """Publish started.""" self._create_btn.setEnabled(False) - self._subset_attributes_wrap.setEnabled(False) - for idx in range(self._subset_views_layout.count()): - widget = self._subset_views_layout.widget(idx) + self._product_attributes_wrap.setEnabled(False) + for idx in range(self._product_views_layout.count()): + widget = self._product_views_layout.widget(idx) widget.set_active_toggle_enabled(False) def _on_controller_reset_start(self): """Controller reset started.""" - for idx in range(self._subset_views_layout.count()): - widget = self._subset_views_layout.widget(idx) + for idx in range(self._product_views_layout.count()): + widget = self._product_views_layout.widget(idx) widget.set_active_toggle_enabled(True) def _on_publish_reset(self): """Context in controller has been reseted.""" self._create_btn.setEnabled(True) - self._subset_attributes_wrap.setEnabled(True) - self._subset_content_widget.setEnabled(self._controller.host_is_valid) + self._product_attributes_wrap.setEnabled(True) + 
self._product_content_widget.setEnabled(self._controller.host_is_valid) def _on_instances_refresh(self): """Controller refreshed instances.""" @@ -457,5 +457,5 @@ class OverviewWidget(QtWidgets.QFrame): # Give a change to process Resize Request QtWidgets.QApplication.processEvents() # Trigger update geometry of - widget = self._subset_views_layout.currentWidget() + widget = self._product_views_layout.currentWidget() widget.updateGeometry() diff --git a/client/ayon_core/tools/publisher/widgets/report_page.py b/client/ayon_core/tools/publisher/widgets/report_page.py index c4a37da887..1bbe8033f9 100644 --- a/client/ayon_core/tools/publisher/widgets/report_page.py +++ b/client/ayon_core/tools/publisher/widgets/report_page.py @@ -744,7 +744,7 @@ class PublishInstanceCardWidget(BaseClickableFrame): self.setObjectName("CardViewWidget") icon_widget = IconValuePixmapLabel(icon, self) - icon_widget.setObjectName("FamilyIconLabel") + icon_widget.setObjectName("ProductTypeIconLabel") label_widget = QtWidgets.QLabel(instance.label, self) diff --git a/client/ayon_core/tools/publisher/widgets/tasks_model.py b/client/ayon_core/tools/publisher/widgets/tasks_model.py new file mode 100644 index 0000000000..8f00dc37a2 --- /dev/null +++ b/client/ayon_core/tools/publisher/widgets/tasks_model.py @@ -0,0 +1,137 @@ +from qtpy import QtWidgets, QtCore, QtGui + +from ayon_core.tools.utils.lib import get_default_task_icon + +TASK_NAME_ROLE = QtCore.Qt.UserRole + 1 +TASK_TYPE_ROLE = QtCore.Qt.UserRole + 2 +TASK_ORDER_ROLE = QtCore.Qt.UserRole + 3 + + +class TasksModel(QtGui.QStandardItemModel): + """Tasks model. + + Task model must have set context of folder paths. + + Items in model are based on 0-infinite folders. Always contain + an interserction of context folder tasks. When no folders are in context + them model is empty if 2 or more are in context folders that don't have + tasks with same names then model is empty too. + + Args: + controller (PublisherController): Controller which handles creation and + publishing. + """ + def __init__(self, controller, allow_empty_task=False): + super(TasksModel, self).__init__() + + self._allow_empty_task = allow_empty_task + self._controller = controller + self._items_by_name = {} + self._folder_paths = [] + self._task_names_by_folder_path = {} + + def set_folder_paths(self, folder_paths): + """Set folders context.""" + self._folder_paths = folder_paths + self.reset() + + @staticmethod + def get_intersection_of_tasks(task_names_by_folder_path): + """Calculate intersection of task names from passed data. + + Example: + ``` + # Passed `task_names_by_folder_path` + { + "/folder_1": ["compositing", "animation"], + "/folder_2": ["compositing", "editorial"] + } + ``` + Result: + ``` + # Set + {"compositing"} + ``` + + Args: + task_names_by_folder_path (dict): Task names in iterable by parent. + """ + tasks = None + for task_names in task_names_by_folder_path.values(): + if tasks is None: + tasks = set(task_names) + else: + tasks &= set(task_names) + + if not tasks: + break + return tasks or set() + + def is_task_name_valid(self, folder_path, task_name): + """Is task name available for folder. + + Todos: + Move this method to PublisherController. + + Args: + folder_path (str): Fodler path where should look for task. + task_name (str): Name of task which should be available in folder + tasks. 
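The intersection behaviour described in the `TasksModel` docstring above is a plain set intersection over the per-folder task lists. A minimal standalone sketch of the same helper, reusing the data from the docstring example (no Qt required):

```python
def get_intersection_of_tasks(task_names_by_folder_path):
    """Task names common to every folder path (mirrors the model's helper)."""
    tasks = None
    for task_names in task_names_by_folder_path.values():
        if tasks is None:
            tasks = set(task_names)
        else:
            tasks &= set(task_names)
        if not tasks:
            break  # early out: the intersection can only shrink
    return tasks or set()


print(get_intersection_of_tasks({
    "/folder_1": ["compositing", "animation"],
    "/folder_2": ["compositing", "editorial"],
}))
# {'compositing'}
```

With no folders in context, or with two or more folders that share no task name, the result is an empty set, which is why the model ends up empty in those cases.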
+ """ + if folder_path not in self._task_names_by_folder_path: + return False + + if self._allow_empty_task and not task_name: + return True + + task_names = self._task_names_by_folder_path[folder_path] + if task_name in task_names: + return True + return False + + def reset(self): + """Update model by current context.""" + if not self._folder_paths: + self._items_by_name = {} + self._task_names_by_folder_path = {} + root_item = self.invisibleRootItem() + root_item.removeRows(0, self.rowCount()) + return + + task_names_by_folder_path = ( + self._controller.get_task_names_by_folder_paths( + self._folder_paths + ) + ) + + self._task_names_by_folder_path = task_names_by_folder_path + + new_task_names = self.get_intersection_of_tasks( + task_names_by_folder_path + ) + if self._allow_empty_task: + new_task_names.add("") + old_task_names = set(self._items_by_name.keys()) + if new_task_names == old_task_names: + return + + root_item = self.invisibleRootItem() + for task_name in old_task_names: + if task_name not in new_task_names: + item = self._items_by_name.pop(task_name) + root_item.removeRow(item.row()) + + new_items = [] + for task_name in new_task_names: + if task_name in self._items_by_name: + continue + + item = QtGui.QStandardItem(task_name) + item.setData(task_name, TASK_NAME_ROLE) + if task_name: + item.setData(get_default_task_icon(), QtCore.Qt.DecorationRole) + self._items_by_name[task_name] = item + new_items.append(item) + + if new_items: + root_item.appendRows(new_items) diff --git a/client/ayon_core/tools/publisher/widgets/tasks_widget.py b/client/ayon_core/tools/publisher/widgets/tasks_widget.py deleted file mode 100644 index 44e290408a..0000000000 --- a/client/ayon_core/tools/publisher/widgets/tasks_widget.py +++ /dev/null @@ -1,183 +0,0 @@ -from qtpy import QtCore, QtGui - -from ayon_core.tools.utils.tasks_widget import TasksWidget, TASK_NAME_ROLE -from ayon_core.tools.utils.lib import get_default_task_icon - - -class TasksModel(QtGui.QStandardItemModel): - """Tasks model. - - Task model must have set context of asset documents. - - Items in model are based on 0-infinite asset documents. Always contain - an interserction of context asset tasks. When no assets are in context - them model is empty if 2 or more are in context assets that don't have - tasks with same names then model is empty too. - - Args: - controller (PublisherController): Controller which handles creation and - publishing. - """ - def __init__(self, controller, allow_empty_task=False): - super(TasksModel, self).__init__() - - self._allow_empty_task = allow_empty_task - self._controller = controller - self._items_by_name = {} - self._asset_names = [] - self._task_names_by_asset_name = {} - - def set_asset_names(self, asset_names): - """Set assets context.""" - self._asset_names = asset_names - self.reset() - - @staticmethod - def get_intersection_of_tasks(task_names_by_asset_name): - """Calculate intersection of task names from passed data. - - Example: - ``` - # Passed `task_names_by_asset_name` - { - "asset_1": ["compositing", "animation"], - "asset_2": ["compositing", "editorial"] - } - ``` - Result: - ``` - # Set - {"compositing"} - ``` - - Args: - task_names_by_asset_name (dict): Task names in iterable by parent. 
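The `reset()` method above deliberately avoids rebuilding the whole model: it compares the old and new task-name sets and only removes rows that disappeared and appends rows that are new, so unchanged items survive a context change. A plain-Python sketch of that bookkeeping (the helper name is invented for illustration), with the corresponding Qt calls noted in comments:

```python
def sync_items(items_by_name, new_names, make_item=str):
    """Update a name->item mapping in place so it matches new_names."""
    if set(items_by_name) == set(new_names):
        return []  # nothing changed, keep existing items untouched

    # Drop names that are no longer valid
    for name in list(items_by_name):
        if name not in new_names:
            items_by_name.pop(name)  # model: root_item.removeRow(item.row())

    # Create items only for names that were not present before
    added = []
    for name in new_names:
        if name not in items_by_name:
            item = make_item(name)   # model: QtGui.QStandardItem(name)
            items_by_name[name] = item
            added.append(item)
    return added                     # model: root_item.appendRows(added)


items = {"compositing": "item-compositing", "animation": "item-animation"}
print(sync_items(items, {"compositing", "editorial"}, make_item="item-{}".format))
# ['item-editorial']; "animation" was removed, "compositing" kept as-is
```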
- """ - tasks = None - for task_names in task_names_by_asset_name.values(): - if tasks is None: - tasks = set(task_names) - else: - tasks &= set(task_names) - - if not tasks: - break - return tasks or set() - - def is_task_name_valid(self, asset_name, task_name): - """Is task name available for asset. - - Args: - asset_name (str): Name of asset where should look for task. - task_name (str): Name of task which should be available in asset's - tasks. - """ - if asset_name not in self._task_names_by_asset_name: - return False - - if self._allow_empty_task and not task_name: - return True - - task_names = self._task_names_by_asset_name[asset_name] - if task_name in task_names: - return True - return False - - def reset(self): - """Update model by current context.""" - if not self._asset_names: - self._items_by_name = {} - self._task_names_by_asset_name = {} - self.clear() - return - - task_names_by_asset_name = ( - self._controller.get_task_names_by_asset_names(self._asset_names) - ) - - self._task_names_by_asset_name = task_names_by_asset_name - - new_task_names = self.get_intersection_of_tasks( - task_names_by_asset_name - ) - if self._allow_empty_task: - new_task_names.add("") - old_task_names = set(self._items_by_name.keys()) - if new_task_names == old_task_names: - return - - root_item = self.invisibleRootItem() - for task_name in old_task_names: - if task_name not in new_task_names: - item = self._items_by_name.pop(task_name) - root_item.removeRow(item.row()) - - new_items = [] - for task_name in new_task_names: - if task_name in self._items_by_name: - continue - - item = QtGui.QStandardItem(task_name) - item.setData(task_name, TASK_NAME_ROLE) - if task_name: - item.setData(get_default_task_icon(), QtCore.Qt.DecorationRole) - self._items_by_name[task_name] = item - new_items.append(item) - - if new_items: - root_item.appendRows(new_items) - - def headerData(self, section, orientation, role=None): - if role is None: - role = QtCore.Qt.EditRole - # Show nice labels in the header - if section == 0: - if ( - role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole) - and orientation == QtCore.Qt.Horizontal - ): - return "Tasks" - - return super(TasksModel, self).headerData(section, orientation, role) - - -class CreateWidgetTasksWidget(TasksWidget): - def __init__(self, controller, parent): - self._controller = controller - super(CreateWidgetTasksWidget, self).__init__(None, parent) - - self._enabled = None - - def _create_source_model(self): - return TasksModel(self._controller) - - def set_asset_name(self, asset_name): - current = self.get_selected_task_name() - if current: - self._last_selected_task_name = current - - self._tasks_model.set_asset_names([asset_name]) - if self._last_selected_task_name and self._enabled: - self.select_task_name(self._last_selected_task_name) - - # Force a task changed emit. 
- self.task_changed.emit() - - def select_task_name(self, task_name): - super(CreateWidgetTasksWidget, self).select_task_name(task_name) - if not self._enabled: - current = self.get_selected_task_name() - if current: - self._last_selected_task_name = current - self._clear_selection() - - def set_enabled(self, enabled): - self._enabled = enabled - if not enabled: - last_selected_task_name = self.get_selected_task_name() - if last_selected_task_name: - self._last_selected_task_name = last_selected_task_name - self._clear_selection() - - elif self._last_selected_task_name is not None: - self.select_task_name(self._last_selected_task_name) diff --git a/client/ayon_core/tools/publisher/widgets/widgets.py b/client/ayon_core/tools/publisher/widgets/widgets.py index bd5ab250bd..4005cf2c84 100644 --- a/client/ayon_core/tools/publisher/widgets/widgets.py +++ b/client/ayon_core/tools/publisher/widgets/widgets.py @@ -22,12 +22,12 @@ from ayon_core.tools.utils import ( ) from ayon_core.style import get_objected_colors from ayon_core.pipeline.create import ( - SUBSET_NAME_ALLOWED_SYMBOLS, + PRODUCT_NAME_ALLOWED_SYMBOLS, TaskNotSetError, ) from .thumbnail_widget import ThumbnailWidget -from .assets_widget import AssetsDialog -from .tasks_widget import TasksModel +from .folders_dialog import FoldersDialog +from .tasks_model import TasksModel from .icons import ( get_pixmap, get_icon_path @@ -127,7 +127,7 @@ class ContextWarningLabel(PublishPixmapLabel): self.setToolTip( "Contain invalid context. Please check details." ) - self.setObjectName("FamilyIconLabel") + self.setObjectName("ProductTypeIconLabel") class PublishIconBtn(IconButton): @@ -422,29 +422,29 @@ class ClickableLineEdit(QtWidgets.QLineEdit): event.accept() -class AssetsField(BaseClickableFrame): - """Field where asset name of selected instance/s is showed. +class FoldersFields(BaseClickableFrame): + """Field where folder path of selected instance/s is showed. - Click on the field will trigger `AssetsDialog`. + Click on the field will trigger `FoldersDialog`. """ value_changed = QtCore.Signal() def __init__(self, controller, parent): - super(AssetsField, self).__init__(parent) - self.setObjectName("AssetNameInputWidget") + super(FoldersFields, self).__init__(parent) + self.setObjectName("FolderPathInputWidget") # Don't use 'self' for parent! 
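The renamed `PRODUCT_NAME_ALLOWED_SYMBOLS` constant imported above is what the variant validation patterns are built from, both later in this file (`VariantInputWidget`, which allows an empty value while typing) and in `user_values.py` further down (which requires at least one character). A small sketch of both patterns; the concrete character class here is an assumption, the real value lives in `ayon_core.pipeline.create`:

```python
import re

# Assumed value for illustration only; see ayon_core.pipeline.create.
PRODUCT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_."

# VariantInputWidget: "*" so an empty field is a valid intermediate state.
variant_input_pattern = re.compile("^[{}]*$".format(PRODUCT_NAME_ALLOWED_SYMBOLS))
# UserPublishValuesModel.variant_regex: "+" so a submitted variant cannot be empty.
variant_value_regex = re.compile("^[{}]+$".format(PRODUCT_NAME_ALLOWED_SYMBOLS))

print(bool(variant_input_pattern.match("Main_v01")))   # True
print(bool(variant_input_pattern.match("")))           # True
print(bool(variant_value_regex.match("")))             # False
print(bool(variant_value_regex.match("no spaces!")))   # False
```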
# - this widget has specific styles - dialog = AssetsDialog(controller, parent) + dialog = FoldersDialog(controller, parent) name_input = ClickableLineEdit(self) - name_input.setObjectName("AssetNameInput") + name_input.setObjectName("FolderPathInput") icon_name = "fa.window-maximize" icon = qtawesome.icon(icon_name, color="white") icon_btn = QtWidgets.QPushButton(self) icon_btn.setIcon(icon) - icon_btn.setObjectName("AssetNameInputButton") + icon_btn.setObjectName("FolderPathInputButton") layout = QtWidgets.QHBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) @@ -465,6 +465,7 @@ class AssetsField(BaseClickableFrame): icon_btn.clicked.connect(self._mouse_release_callback) dialog.finished.connect(self._on_dialog_finish) + self._controller = controller self._dialog = dialog self._name_input = name_input self._icon_btn = icon_btn @@ -480,28 +481,28 @@ class AssetsField(BaseClickableFrame): if not result: return - asset_name = self._dialog.get_selected_asset() - if asset_name is None: + folder_path = self._dialog.get_selected_folder_path() + if folder_path is None: return - self._selected_items = [asset_name] + self._selected_items = [folder_path] self._has_value_changed = ( self._origin_value != self._selected_items ) - self.set_text(asset_name) + self.set_text(folder_path) self._set_is_valid(True) self.value_changed.emit() def _mouse_release_callback(self): - self._dialog.set_selected_assets(self._selected_items) + self._dialog.set_selected_folders(self._selected_items) self._dialog.open() def set_multiselection_text(self, text): - """Change text for multiselection of different assets. + """Change text for multiselection of different folders. When there are selected multiple instances at once and they don't have - same asset in context. + same folder in context. """ self._multiselection_text = text @@ -520,63 +521,58 @@ class AssetsField(BaseClickableFrame): set_style_property(self._icon_btn, "state", state) def is_valid(self): - """Is asset valid.""" + """Is folder valid.""" return self._is_valid def has_value_changed(self): - """Value of asset has changed.""" + """Value of folder has changed.""" return self._has_value_changed def get_selected_items(self): - """Selected asset names.""" + """Selected folder paths.""" return list(self._selected_items) def set_text(self, text): """Set text in text field. - Does not change selected items (assets). + Does not change selected items (folders). """ self._name_input.setText(text) self._name_input.end(False) - def set_selected_items(self, asset_names=None): - """Set asset names for selection of instances. + def set_selected_items(self, folder_paths=None): + """Set folder paths for selection of instances. - Passed asset names are validated and if there are 2 or more different - asset names then multiselection text is shown. + Passed folder paths are validated and if there are 2 or more different + folder paths then multiselection text is shown. Args: - asset_names (list, tuple, set, NoneType): List of asset names. + folder_paths (list, tuple, set, NoneType): List of folder paths. 
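`set_selected_items` (its body follows right after) decides what the folder field displays: empty text for no folders, the path itself for exactly one, and a multiselection placeholder (or a `|`-joined list when none was configured) for several different folders, while validity is now delegated to `controller.are_folder_paths_valid`. A small sketch of just the display-text decision, as a free function for illustration:

```python
def folder_field_text(folder_paths, multiselection_text=None):
    """Display text used by FoldersFields for the given selection (sketch)."""
    folder_paths = list(folder_paths or [])
    if not folder_paths:
        return ""
    if len(folder_paths) == 1:
        return folder_paths[0]
    # Instances point to different folders: show the configured placeholder,
    # or join the paths when no multiselection text was set.
    if multiselection_text is not None:
        return multiselection_text
    return "|".join(folder_paths)


print(repr(folder_field_text([])))                                    # ''
print(folder_field_text(["/assets/char/hero"]))                       # /assets/char/hero
print(folder_field_text(["/sh010", "/sh020"], "< Multiselection >"))  # < Multiselection >
print(folder_field_text(["/sh010", "/sh020"]))                        # /sh010|/sh020
```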
+ """ - if asset_names is None: - asset_names = [] + if folder_paths is None: + folder_paths = [] self._has_value_changed = False - self._origin_value = list(asset_names) - self._selected_items = list(asset_names) - is_valid = True - if not asset_names: + self._origin_value = list(folder_paths) + self._selected_items = list(folder_paths) + is_valid = self._controller.are_folder_paths_valid(folder_paths) + if not folder_paths: self.set_text("") - elif len(asset_names) == 1: - asset_name = tuple(asset_names)[0] - is_valid = self._dialog.name_is_valid(asset_name) - self.set_text(asset_name) + elif len(folder_paths) == 1: + folder_path = tuple(folder_paths)[0] + self.set_text(folder_path) else: - for asset_name in asset_names: - is_valid = self._dialog.name_is_valid(asset_name) - if not is_valid: - break - multiselection_text = self._multiselection_text if multiselection_text is None: - multiselection_text = "|".join(asset_names) + multiselection_text = "|".join(folder_paths) self.set_text(multiselection_text) self._set_is_valid(is_valid) def reset_to_origin(self): - """Change to asset names set with last `set_selected_items` call.""" + """Change to folder paths set with last `set_selected_items` call.""" self.set_selected_items(self._origin_value) def confirm_value(self): @@ -610,9 +606,9 @@ class TasksCombobox(QtWidgets.QComboBox): """Combobox to show tasks for selected instances. Combobox gives ability to select only from intersection of task names for - asset names in selected instances. + folder paths in selected instances. - If asset names in selected instances does not have same tasks then combobox + If folder paths in selected instances does not have same tasks then combobox will be empty. """ value_changed = QtCore.Signal() @@ -746,23 +742,23 @@ class TasksCombobox(QtWidgets.QComboBox): """ return list(self._selected_items) - def set_asset_names(self, asset_names): - """Set asset names for which should show tasks.""" + def set_folder_paths(self, folder_paths): + """Set folder paths for which should show tasks.""" self._ignore_index_change = True - self._model.set_asset_names(asset_names) + self._model.set_folder_paths(folder_paths) self._proxy_model.set_filter_empty(False) self._proxy_model.sort(0) self._ignore_index_change = False - # It is a bug if not exactly one asset got here - if len(asset_names) != 1: + # It is a bug if not exactly one folder got here + if len(folder_paths) != 1: self.set_selected_item("") self._set_is_valid(False) return - asset_name = tuple(asset_names)[0] + folder_path = tuple(folder_paths)[0] is_valid = False if self._selected_items: @@ -770,7 +766,7 @@ class TasksCombobox(QtWidgets.QComboBox): valid_task_names = [] for task_name in self._selected_items: - _is_valid = self._model.is_task_name_valid(asset_name, task_name) + _is_valid = self._model.is_task_name_valid(folder_path, task_name) if _is_valid: valid_task_names.append(task_name) else: @@ -791,42 +787,42 @@ class TasksCombobox(QtWidgets.QComboBox): self._set_is_valid(is_valid) - def confirm_value(self, asset_names): + def confirm_value(self, folder_paths): new_task_name = self._selected_items[0] self._origin_value = [ - (asset_name, new_task_name) - for asset_name in asset_names + (folder_path, new_task_name) + for folder_path in folder_paths ] self._origin_selection = copy.deepcopy(self._selected_items) self._has_value_changed = False - def set_selected_items(self, asset_task_combinations=None): + def set_selected_items(self, folder_task_combinations=None): """Set items for selected instances. 
Args: - asset_task_combinations (list): List of tuples. Each item in - the list contain asset name and task name. + folder_task_combinations (list): List of tuples. Each item in + the list contain folder path and task name. """ self._proxy_model.set_filter_empty(False) self._proxy_model.sort(0) - if asset_task_combinations is None: - asset_task_combinations = [] + if folder_task_combinations is None: + folder_task_combinations = [] task_names = set() - task_names_by_asset_name = collections.defaultdict(set) - for asset_name, task_name in asset_task_combinations: + task_names_by_folder_path = collections.defaultdict(set) + for folder_path, task_name in folder_task_combinations: task_names.add(task_name) - task_names_by_asset_name[asset_name].add(task_name) - asset_names = set(task_names_by_asset_name.keys()) + task_names_by_folder_path[folder_path].add(task_name) + folder_paths = set(task_names_by_folder_path.keys()) self._ignore_index_change = True - self._model.set_asset_names(asset_names) + self._model.set_folder_paths(folder_paths) self._has_value_changed = False - self._origin_value = copy.deepcopy(asset_task_combinations) + self._origin_value = copy.deepcopy(folder_task_combinations) self._origin_selection = list(task_names) self._selected_items = list(task_names) @@ -840,9 +836,9 @@ class TasksCombobox(QtWidgets.QComboBox): task_name = tuple(task_names)[0] idx = self.findText(task_name) is_valid = not idx < 0 - if not is_valid and len(asset_names) > 1: - is_valid = self._validate_task_names_by_asset_names( - task_names_by_asset_name + if not is_valid and len(folder_paths) > 1: + is_valid = self._validate_task_names_by_folder_paths( + task_names_by_folder_path ) self.set_selected_item(task_name) @@ -853,9 +849,9 @@ class TasksCombobox(QtWidgets.QComboBox): if not is_valid: break - if not is_valid and len(asset_names) > 1: - is_valid = self._validate_task_names_by_asset_names( - task_names_by_asset_name + if not is_valid and len(folder_paths) > 1: + is_valid = self._validate_task_names_by_folder_paths( + task_names_by_folder_path ) multiselection_text = self._multiselection_text if multiselection_text is None: @@ -868,10 +864,10 @@ class TasksCombobox(QtWidgets.QComboBox): self.value_changed.emit() - def _validate_task_names_by_asset_names(self, task_names_by_asset_name): - for asset_name, task_names in task_names_by_asset_name.items(): + def _validate_task_names_by_folder_paths(self, task_names_by_folder_path): + for folder_path, task_names in task_names_by_folder_path.items(): for task_name in task_names: - if not self._model.is_task_name_valid(asset_name, task_name): + if not self._model.is_task_name_valid(folder_path, task_name): return False return True @@ -901,7 +897,7 @@ class VariantInputWidget(PlaceholderLineEdit): self.setObjectName("VariantInput") self.setToolTip(VARIANT_TOOLTIP) - name_pattern = "^[{}]*$".format(SUBSET_NAME_ALLOWED_SYMBOLS) + name_pattern = "^[{}]*$".format(PRODUCT_NAME_ALLOWED_SYMBOLS) self._name_pattern = name_pattern self._compiled_name_pattern = re.compile(name_pattern) @@ -1077,10 +1073,10 @@ class MultipleItemWidget(QtWidgets.QWidget): class GlobalAttrsWidget(QtWidgets.QWidget): - """Global attributes mainly to define context and subset name of instances. + """Global attributes mainly to define context and product name of instances. - Subset name is or may be affected on context. Gives abiity to modify - context and subset name of instance. This change is not autopromoted but + product name is or may be affected on context. 
Gives abiity to modify + context and product name of instance. This change is not autopromoted but must be submitted. Warning: Until artist hit `Submit` changes must not be propagated to @@ -1088,10 +1084,10 @@ class GlobalAttrsWidget(QtWidgets.QWidget): Global attributes contain these widgets: Variant: [ text input ] - Asset: [ asset dialog ] + Folder: [ folder dialog ] Task: [ combobox ] - Family: [ immutable ] - Subset name: [ immutable ] + Product type: [ immutable ] + product name: [ immutable ] [Submit] [Cancel] """ instance_context_changed = QtCore.Signal() @@ -1106,20 +1102,20 @@ class GlobalAttrsWidget(QtWidgets.QWidget): self._current_instances = [] variant_input = VariantInputWidget(self) - asset_value_widget = AssetsField(controller, self) + folder_value_widget = FoldersFields(controller, self) task_value_widget = TasksCombobox(controller, self) - family_value_widget = MultipleItemWidget(self) - subset_value_widget = MultipleItemWidget(self) + product_type_value_widget = MultipleItemWidget(self) + product_value_widget = MultipleItemWidget(self) variant_input.set_multiselection_text(self.multiselection_text) - asset_value_widget.set_multiselection_text(self.multiselection_text) + folder_value_widget.set_multiselection_text(self.multiselection_text) task_value_widget.set_multiselection_text(self.multiselection_text) variant_input.set_value() - asset_value_widget.set_selected_items() + folder_value_widget.set_selected_items() task_value_widget.set_selected_items() - family_value_widget.set_value() - subset_value_widget.set_value() + product_type_value_widget.set_value() + product_value_widget.set_value() submit_btn = QtWidgets.QPushButton("Confirm", self) cancel_btn = QtWidgets.QPushButton("Cancel", self) @@ -1137,23 +1133,23 @@ class GlobalAttrsWidget(QtWidgets.QWidget): main_layout.setHorizontalSpacing(INPUTS_LAYOUT_HSPACING) main_layout.setVerticalSpacing(INPUTS_LAYOUT_VSPACING) main_layout.addRow("Variant", variant_input) - main_layout.addRow("Folder", asset_value_widget) + main_layout.addRow("Folder", folder_value_widget) main_layout.addRow("Task", task_value_widget) - main_layout.addRow("Product type", family_value_widget) - main_layout.addRow("Product name", subset_value_widget) + main_layout.addRow("Product type", product_type_value_widget) + main_layout.addRow("Product name", product_value_widget) main_layout.addRow(btns_layout) variant_input.value_changed.connect(self._on_variant_change) - asset_value_widget.value_changed.connect(self._on_asset_change) + folder_value_widget.value_changed.connect(self._on_folder_change) task_value_widget.value_changed.connect(self._on_task_change) submit_btn.clicked.connect(self._on_submit) cancel_btn.clicked.connect(self._on_cancel) self.variant_input = variant_input - self.asset_value_widget = asset_value_widget + self.folder_value_widget = folder_value_widget self.task_value_widget = task_value_widget - self.family_value_widget = family_value_widget - self.subset_value_widget = subset_value_widget + self.product_type_value_widget = product_type_value_widget + self.product_value_widget = product_value_widget self.submit_btn = submit_btn self.cancel_btn = cancel_btn @@ -1161,67 +1157,67 @@ class GlobalAttrsWidget(QtWidgets.QWidget): """Commit changes for selected instances.""" variant_value = None - asset_name = None + folder_path = None task_name = None if self.variant_input.has_value_changed(): variant_value = self.variant_input.get_value()[0] - if self.asset_value_widget.has_value_changed(): - asset_name = 
self.asset_value_widget.get_selected_items()[0] + if self.folder_value_widget.has_value_changed(): + folder_path = self.folder_value_widget.get_selected_items()[0] if self.task_value_widget.has_value_changed(): task_name = self.task_value_widget.get_selected_items()[0] - subset_names = set() + product_names = set() invalid_tasks = False - asset_names = [] + folder_paths = [] for instance in self._current_instances: new_variant_value = instance.get("variant") - new_asset_name = instance.get("folderPath") + new_folder_path = instance.get("folderPath") new_task_name = instance.get("task") if variant_value is not None: new_variant_value = variant_value - if asset_name is not None: - new_asset_name = asset_name + if folder_path is not None: + new_folder_path = folder_path if task_name is not None: new_task_name = task_name - asset_names.append(new_asset_name) + folder_paths.append(new_folder_path) try: - new_subset_name = self._controller.get_subset_name( + new_product_name = self._controller.get_product_name( instance.creator_identifier, new_variant_value, new_task_name, - new_asset_name, + new_folder_path, instance.id, ) except TaskNotSetError: invalid_tasks = True instance.set_task_invalid(True) - subset_names.add(instance["subset"]) + product_names.add(instance["productName"]) continue - subset_names.add(new_subset_name) + product_names.add(new_product_name) if variant_value is not None: instance["variant"] = variant_value - if asset_name is not None: - instance["folderPath"] = asset_name + if folder_path is not None: + instance["folderPath"] = folder_path instance.set_asset_invalid(False) if task_name is not None: instance["task"] = task_name or None instance.set_task_invalid(False) - instance["subset"] = new_subset_name + instance["productName"] = new_product_name if invalid_tasks: self.task_value_widget.set_invalid_empty_task() - self.subset_value_widget.set_value(subset_names) + self.product_value_widget.set_value(product_names) self._set_btns_enabled(False) self._set_btns_visible(invalid_tasks) @@ -1229,11 +1225,11 @@ class GlobalAttrsWidget(QtWidgets.QWidget): if variant_value is not None: self.variant_input.confirm_value() - if asset_name is not None: - self.asset_value_widget.confirm_value() + if folder_path is not None: + self.folder_value_widget.confirm_value() if task_name is not None: - self.task_value_widget.confirm_value(asset_names) + self.task_value_widget.confirm_value(folder_paths) self.instance_context_changed.emit() @@ -1241,19 +1237,19 @@ class GlobalAttrsWidget(QtWidgets.QWidget): """Cancel changes and set back to their irigin value.""" self.variant_input.reset_to_origin() - self.asset_value_widget.reset_to_origin() + self.folder_value_widget.reset_to_origin() self.task_value_widget.reset_to_origin() self._set_btns_enabled(False) def _on_value_change(self): any_invalid = ( not self.variant_input.is_valid() - or not self.asset_value_widget.is_valid() + or not self.folder_value_widget.is_valid() or not self.task_value_widget.is_valid() ) any_changed = ( self.variant_input.has_value_changed() - or self.asset_value_widget.has_value_changed() + or self.folder_value_widget.has_value_changed() or self.task_value_widget.has_value_changed() ) self._set_btns_visible(any_changed or any_invalid) @@ -1263,9 +1259,9 @@ class GlobalAttrsWidget(QtWidgets.QWidget): def _on_variant_change(self): self._on_value_change() - def _on_asset_change(self): - asset_names = self.asset_value_widget.get_selected_items() - self.task_value_widget.set_asset_names(asset_names) + def 
_on_folder_change(self): + folder_paths = self.folder_value_widget.get_selected_items() + self.task_value_widget.set_folder_paths(folder_paths) self._on_value_change() def _on_task_change(self): @@ -1290,40 +1286,40 @@ class GlobalAttrsWidget(QtWidgets.QWidget): self._current_instances = instances - asset_names = set() + folder_paths = set() variants = set() - families = set() - subset_names = set() + product_types = set() + product_names = set() editable = True if len(instances) == 0: editable = False - asset_task_combinations = [] + folder_task_combinations = [] for instance in instances: # NOTE I'm not sure how this can even happen? if instance.creator_identifier is None: editable = False variants.add(instance.get("variant") or self.unknown_value) - families.add(instance.get("family") or self.unknown_value) - asset_name = instance.get("folderPath") or self.unknown_value + product_types.add(instance.get("productType") or self.unknown_value) + folder_path = instance.get("folderPath") or self.unknown_value task_name = instance.get("task") or "" - asset_names.add(asset_name) - asset_task_combinations.append((asset_name, task_name)) - subset_names.add(instance.get("subset") or self.unknown_value) + folder_paths.add(folder_path) + folder_task_combinations.append((folder_path, task_name)) + product_names.add(instance.get("productName") or self.unknown_value) self.variant_input.set_value(variants) - # Set context of asset widget - self.asset_value_widget.set_selected_items(asset_names) + # Set context of folder widget + self.folder_value_widget.set_selected_items(folder_paths) # Set context of task widget - self.task_value_widget.set_selected_items(asset_task_combinations) - self.family_value_widget.set_value(families) - self.subset_value_widget.set_value(subset_names) + self.task_value_widget.set_selected_items(folder_task_combinations) + self.product_type_value_widget.set_value(product_types) + self.product_value_widget.set_value(product_names) self.variant_input.setEnabled(editable) - self.asset_value_widget.setEnabled(editable) + self.folder_value_widget.setEnabled(editable) self.task_value_widget.setEnabled(editable) @@ -1476,7 +1472,7 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): Widgets are disabled if context of instance is not valid. Definitions are shown for all instance no matter if they have different - families. Similar definitions are merged into one (different label + product types. Similar definitions are merged into one (different label does not count). """ @@ -1624,7 +1620,7 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): plugin_val[attr_def.key] = value -class SubsetAttributesWidget(QtWidgets.QWidget): +class ProductAttributesWidget(QtWidgets.QWidget): """Wrapper widget where attributes of instance/s are modified. ┌─────────────────┬─────────────┐ │ Global │ │ @@ -1640,7 +1636,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget): convert_requested = QtCore.Signal() def __init__(self, controller, parent): - super(SubsetAttributesWidget, self).__init__(parent) + super(ProductAttributesWidget, self).__init__(parent) # TOP PART top_widget = QtWidgets.QWidget(self) @@ -1666,9 +1662,9 @@ class SubsetAttributesWidget(QtWidgets.QWidget): # Set the label text with 'setText' to apply html convert_label.setText( ( - "Found old publishable subsets" + "Found old publishable products" " incompatible with new publisher." - "
<br/><br/>Press the update subsets button" + "<br/><br/>
Press the update products button" " to automatically update them" " to be able to publish again." ) @@ -1677,7 +1673,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget): convert_label.setAlignment(QtCore.Qt.AlignCenter) convert_btn = QtWidgets.QPushButton( - "Update subsets", convert_widget + "Update products", convert_widget ) convert_separator = QtWidgets.QFrame(convert_widget) convert_separator.setObjectName("Separator") diff --git a/client/ayon_core/tools/publisher/window.py b/client/ayon_core/tools/publisher/window.py index f4dadf7f67..123864ff6c 100644 --- a/client/ayon_core/tools/publisher/window.py +++ b/client/ayon_core/tools/publisher/window.py @@ -1029,7 +1029,7 @@ class PublisherWindow(QtWidgets.QDialog): under_mouse = False my_pos = self.mapFromGlobal(global_pos) if self.rect().contains(my_pos): - widget_geo = self._overview_widget.get_subset_views_geo() + widget_geo = self._overview_widget.get_product_views_geo() widget_x = widget_geo.left() + (widget_geo.width() * 0.5) under_mouse = widget_x < global_pos.x() self._create_overlay_button.set_under_mouse(under_mouse) diff --git a/client/ayon_core/tools/push_to_project/control.py b/client/ayon_core/tools/push_to_project/control.py index 1336721e5a..3b6bd85028 100644 --- a/client/ayon_core/tools/push_to_project/control.py +++ b/client/ayon_core/tools/push_to_project/control.py @@ -9,7 +9,7 @@ from ayon_core.client import ( from ayon_core.settings import get_project_settings from ayon_core.lib import prepare_template_data from ayon_core.lib.events import QueuedEventSystem -from ayon_core.pipeline.create import get_subset_name_template +from ayon_core.pipeline.create import get_product_name_template from ayon_core.tools.ayon_utils.models import ProjectsModel, HierarchyModel from .models import ( @@ -256,12 +256,12 @@ class PushToContextController: project_settings = get_project_settings(project_name) subset_doc = self._src_subset_doc - family = subset_doc["data"].get("family") - if not family: - family = subset_doc["data"]["families"][0] - template = get_subset_name_template( + product_type = subset_doc["data"].get("family") + if not product_type: + product_type = subset_doc["data"]["families"][0] + template = get_product_name_template( self._src_project_name, - family, + product_type, task_name, task_type, None, @@ -279,28 +279,31 @@ class PushToContextController: template_s = template[:idx] template_e = template[idx + len(variant_placeholder):] fill_data = prepare_template_data({ - "family": family, + "family": product_type, + "product": { + "type": product_type, + }, "task": task_name }) try: - subset_s = template_s.format(**fill_data) - subset_e = template_e.format(**fill_data) + product_s = template_s.format(**fill_data) + product_e = template_e.format(**fill_data) except Exception as exc: print("Failed format", exc) return "" - subset_name = self._src_subset_doc["name"] + product_name = self._src_subset_doc["name"] if ( - (subset_s and not subset_name.startswith(subset_s)) - or (subset_e and not subset_name.endswith(subset_e)) + (product_s and not product_name.startswith(product_s)) + or (product_e and not product_name.endswith(product_e)) ): return "" - if subset_s: - subset_name = subset_name[len(subset_s):] - if subset_e: - subset_name = subset_name[:len(subset_e)] - return subset_name + if product_s: + product_name = product_name[len(product_s):] + if product_e: + product_name = product_name[:len(product_e)] + return product_name def _check_submit_validations(self): if not self._user_values.is_valid: diff --git 
a/client/ayon_core/tools/push_to_project/models/integrate.py b/client/ayon_core/tools/push_to_project/models/integrate.py index 175716cf10..b427f3d226 100644 --- a/client/ayon_core/tools/push_to_project/models/integrate.py +++ b/client/ayon_core/tools/push_to_project/models/integrate.py @@ -8,8 +8,6 @@ import sys import traceback import uuid -from bson.objectid import ObjectId - from ayon_core.client import ( get_project, get_assets, @@ -44,7 +42,7 @@ from ayon_core.pipeline import Anatomy from ayon_core.pipeline.version_start import get_versioning_start from ayon_core.pipeline.template_data import get_template_data from ayon_core.pipeline.publish import get_publish_template_name -from ayon_core.pipeline.create import get_subset_name +from ayon_core.pipeline.create import get_product_name UNKNOWN = object() @@ -463,8 +461,8 @@ class ProjectPushItemProcess: self._subset_doc = None self._version_doc = None - self._family = None - self._subset_name = None + self._product_type = None + self._product_name = None self._project_settings = None self._template_name = None @@ -496,9 +494,9 @@ class ProjectPushItemProcess: self._log_info("Destination project was found") self._fill_or_create_destination_asset() self._log_info("Destination asset was determined") - self._determine_family() + self._determine_product_type() self._determine_publish_template_name() - self._determine_subset_name() + self._determine_product_name() self._make_sure_subset_exists() self._make_sure_version_exists() self._log_info("Prerequirements were prepared") @@ -586,11 +584,11 @@ class ProjectPushItemProcess: )) raise PushToProjectError(self._status.fail_reason) - subset_id = version_doc["parent"] - subset_doc = get_subset_by_id(src_project_name, subset_id) + product_id = version_doc["parent"] + subset_doc = get_subset_by_id(src_project_name, product_id) if not subset_doc: self._status.set_failed(( - f"Could find subset with id \"{subset_id}\"" + f"Could find product with id \"{product_id}\"" f" in project \"{src_project_name}\"" )) raise PushToProjectError(self._status.fail_reason) @@ -793,29 +791,30 @@ class ProjectPushItemProcess: task_info.update(task_type_info) self._task_info = task_info - def _determine_family(self): + def _determine_product_type(self): subset_doc = self._src_subset_doc - family = subset_doc["data"].get("family") + product_type = subset_doc["data"].get("family") families = subset_doc["data"].get("families") - if not family and families: - family = families[0] + if not product_type and families: + product_type = families[0] - if not family: + if not product_type: self._status.set_failed( - "Couldn't figure out family from source subset" + "Couldn't figure out product type from source product" ) raise PushToProjectError(self._status.fail_reason) self._log_debug( - f"Publishing family is '{family}' (Based on source subset)" + f"Publishing product type is '{product_type}'" + f" (Based on source product)" ) - self._family = family + self._product_type = product_type def _determine_publish_template_name(self): template_name = get_publish_template_name( self._item.dst_project_name, self.host_name, - self._family, + self._product_type, self._task_info.get("name"), self._task_info.get("type"), project_settings=self._project_settings @@ -825,39 +824,39 @@ class ProjectPushItemProcess: ) self._template_name = template_name - def _determine_subset_name(self): - family = self._family + def _determine_product_name(self): + product_type = self._product_type asset_doc = self._asset_doc task_info = self._task_info - 
subset_name = get_subset_name( - family, - self._item.variant, - task_info.get("name"), + product_name = get_product_name( + self._item.dst_project_name, asset_doc, - project_name=self._item.dst_project_name, - host_name=self.host_name, + task_info.get("name"), + self.host_name, + product_type, + self._item.variant, project_settings=self._project_settings ) self._log_info( - f"Push will be integrating to subset with name '{subset_name}'" + f"Push will be integrating to product with name '{product_name}'" ) - self._subset_name = subset_name + self._product_name = product_name def _make_sure_subset_exists(self): project_name = self._item.dst_project_name asset_id = self._asset_doc["_id"] - subset_name = self._subset_name - family = self._family - subset_doc = get_subset_by_name(project_name, subset_name, asset_id) + product_name = self._product_name + product_type = self._product_type + subset_doc = get_subset_by_name(project_name, product_name, asset_id) if subset_doc: self._subset_doc = subset_doc return subset_doc data = { - "families": [family] + "families": [product_type] } subset_doc = new_subset_document( - subset_name, family, asset_id, data + product_name, product_type, asset_id, data ) self._operations.create_entity(project_name, "subset", subset_doc) self._subset_doc = subset_doc @@ -869,7 +868,7 @@ class ProjectPushItemProcess: version = self._item.dst_version src_version_doc = self._src_version_doc subset_doc = self._subset_doc - subset_id = subset_doc["_id"] + product_id = subset_doc["_id"] src_data = src_version_doc["data"] families = subset_doc["data"].get("families") if not families: @@ -886,7 +885,7 @@ class ProjectPushItemProcess: } if version is None: last_version_doc = get_last_version_by_subset_id( - project_name, subset_id + project_name, product_id ) if last_version_doc: version = int(last_version_doc["name"]) + 1 @@ -896,17 +895,17 @@ class ProjectPushItemProcess: self.host_name, task_name=self._task_info["name"], task_type=self._task_info["type"], - family=families[0], - subset=subset_doc["name"] + product_type=families[0], + product_name=subset_doc["name"] ) existing_version_doc = get_version_by_name( - project_name, version, subset_id + project_name, version, product_id ) # Update existing version if existing_version_doc: version_doc = new_version_doc( - version, subset_id, version_data, existing_version_doc["_id"] + version, product_id, version_data, existing_version_doc["_id"] ) update_data = prepare_version_update_data( existing_version_doc, version_doc @@ -923,7 +922,7 @@ class ProjectPushItemProcess: return version_doc = new_version_doc( - version, subset_id, version_data + version, product_id, version_data ) self._operations.create_entity(project_name, "version", version_doc) @@ -957,8 +956,12 @@ class ProjectPushItemProcess: self.host_name ) formatting_data.update({ - "subset": self._subset_name, - "family": self._family, + "subset": self._product_name, + "family": self._product_type, + "product": { + "name": self._product_name, + "type": self._product_type, + }, "version": version_doc["name"] }) @@ -1080,7 +1083,6 @@ class ProjectPushItemProcess: new_repre_files = [] for (path, rootless_path) in repre_filepaths: new_repre_files.append({ - "_id": ObjectId(), "path": rootless_path, "size": os.path.getsize(path), "hash": source_hash(path), diff --git a/client/ayon_core/tools/push_to_project/models/user_values.py b/client/ayon_core/tools/push_to_project/models/user_values.py index a12a1513ee..edef2fe4fb 100644 --- 
a/client/ayon_core/tools/push_to_project/models/user_values.py +++ b/client/ayon_core/tools/push_to_project/models/user_values.py @@ -1,6 +1,6 @@ import re -from ayon_core.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS +from ayon_core.pipeline.create import PRODUCT_NAME_ALLOWED_SYMBOLS class UserPublishValuesModel: @@ -12,7 +12,7 @@ class UserPublishValuesModel: """ folder_name_regex = re.compile("^[a-zA-Z0-9_.]+$") - variant_regex = re.compile("^[{}]+$".format(SUBSET_NAME_ALLOWED_SYMBOLS)) + variant_regex = re.compile("^[{}]+$".format(PRODUCT_NAME_ALLOWED_SYMBOLS)) def __init__(self, controller): self._controller = controller diff --git a/client/ayon_core/tools/pyblish_pype/control.py b/client/ayon_core/tools/pyblish_pype/control.py index 1a3e7a15f0..c5034e2736 100644 --- a/client/ayon_core/tools/pyblish_pype/control.py +++ b/client/ayon_core/tools/pyblish_pype/control.py @@ -202,19 +202,39 @@ class Controller(QtCore.QObject): def current_state(self): return self._current_state + @staticmethod + def _convert_filter_presets(filter_presets): + """Convert AYON settings presets to dictionary. + + Returns: + dict[str, dict[str, Any]]: Filter presets converted to dictionary. + """ + if not isinstance(filter_presets, list): + return filter_presets + + return { + filter_preset["name"]: { + item["name"]: item["value"] + for item in filter_preset["value"] + } + for filter_preset in filter_presets + } + def presets_by_hosts(self): # Get global filters as base presets = get_current_project_settings() if not presets: return {} - result = presets.get("global", {}).get("filters", {}) + result = {} hosts = pyblish.api.registered_hosts() for host in hosts: host_presets = presets.get(host, {}).get("filters") if not host_presets: continue + host_presets = self._convert_filter_presets(host_presets) + for key, value in host_presets.items(): if value is None: if key in result: diff --git a/client/ayon_core/tools/pyblish_pype/model.py b/client/ayon_core/tools/pyblish_pype/model.py index 4c91fb567f..3a402f386e 100644 --- a/client/ayon_core/tools/pyblish_pype/model.py +++ b/client/ayon_core/tools/pyblish_pype/model.py @@ -34,8 +34,6 @@ import qtawesome from six import text_type from .constants import PluginStates, InstanceStates, GroupStates, Roles -from ayon_core.settings import get_system_settings - # ItemTypes UserType = QtGui.QStandardItem.UserType @@ -105,12 +103,8 @@ class IntentModel(QtGui.QStandardItemModel): self._item_count = 0 self.default_index = 0 - intent_settings = ( - get_system_settings() - .get("modules", {}) - .get("ftrack", {}) - .get("intent", {}) - ) + # Intent settings are not available in core addon + intent_settings = {} items = intent_settings.get("items", {}) if not items: diff --git a/client/ayon_core/tools/sceneinventory/control.py b/client/ayon_core/tools/sceneinventory/control.py index 409f92b506..16b889e855 100644 --- a/client/ayon_core/tools/sceneinventory/control.py +++ b/client/ayon_core/tools/sceneinventory/control.py @@ -14,8 +14,7 @@ from .models import SiteSyncModel class SceneInventoryController: """This is a temporary controller for AYON. - Goal of this temporary controller is to provide a way to get current - context instead of using 'AvalonMongoDB' object (or 'legacy_io'). + Goal of this controller is to provide a way to get current context. Also provides (hopefully) cleaner api for site sync. 
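The `_convert_filter_presets` helper added to the pyblish controller above normalizes the AYON settings shape (a list of named presets, each with a list of name/value items) into the dict-of-dicts the existing filtering code understands, and passes any non-list value through untouched. A standalone sketch with hypothetical preset data, only to show the expected shapes:

```python
def convert_filter_presets(filter_presets):
    """List-based AYON presets -> {preset_name: {plugin_name: value}} (sketch)."""
    if not isinstance(filter_presets, list):
        return filter_presets  # already the legacy dict format
    return {
        preset["name"]: {
            item["name"]: item["value"]
            for item in preset["value"]
        }
        for preset in filter_presets
    }


# Hypothetical payload; preset and plugin names invented for illustration.
ayon_presets = [
    {"name": "default", "value": [
        {"name": "ValidateMeshNormals", "value": True},
        {"name": "ExtractReview", "value": False},
    ]},
]
print(convert_filter_presets(ayon_presets))
# {'default': {'ValidateMeshNormals': True, 'ExtractReview': False}}
print(convert_filter_presets({"default": {"ExtractReview": True}}))
# passed through unchanged
```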
""" @@ -70,10 +69,10 @@ class SceneInventoryController: context = self.get_current_context() project_name = context["project_name"] - folder_name = context.get("asset_name") + folder_path = context.get("folder_path") folder_id = None - if folder_name: - folder = ayon_api.get_folder_by_path(project_name, folder_name) + if folder_path: + folder = ayon_api.get_folder_by_path(project_name, folder_path) if folder: folder_id = folder["id"] diff --git a/client/ayon_core/tools/sceneinventory/model.py b/client/ayon_core/tools/sceneinventory/model.py index 05ecfd442d..18fc56db0b 100644 --- a/client/ayon_core/tools/sceneinventory/model.py +++ b/client/ayon_core/tools/sceneinventory/model.py @@ -43,7 +43,7 @@ class InventoryModel(TreeModel): "Name", "version", "count", - "family", + "productType", "group", "loader", "objectName", @@ -140,8 +140,8 @@ class InventoryModel(TreeModel): return qtawesome.icon("fa.file-o", color=color) if index.column() == 3: - # Family icon - return item.get("familyIcon", None) + # Product type icon + return item.get("productTypeIcon", None) column_name = self.Columns[index.column()] @@ -174,7 +174,7 @@ class InventoryModel(TreeModel): return super(InventoryModel, self).data(index, role) def set_hierarchy_view(self, state): - """Set whether to display subsets in hierarchy view.""" + """Set whether to display products in hierarchy view.""" state = bool(state) if state != self._hierarchy_view: @@ -297,7 +297,7 @@ class InventoryModel(TreeModel): self.add_child(item_node, parent=group_node) # TODO Use product icons - family_icon = qtawesome.icon( + product_type_icon = qtawesome.icon( "fa.folder", color="#0091B2" ) # Prepare site sync specific data @@ -313,18 +313,18 @@ class InventoryModel(TreeModel): subset = group_dict["subset"] asset = group_dict["asset"] - # Get the primary family + # Get product type maj_version, _ = schema.get_schema_version(subset["schema"]) if maj_version < 3: src_doc = version else: src_doc = subset - prim_family = src_doc["data"].get("family") - if not prim_family: + product_type = src_doc["data"].get("family") + if not product_type: families = src_doc["data"].get("families") if families: - prim_family = families[0] + product_type = families[0] # Store the highest available version so the model can know # whether current version is currently up-to-date. @@ -340,8 +340,8 @@ class InventoryModel(TreeModel): group_node["representation"] = repre_id group_node["version"] = version["name"] group_node["highest_version"] = highest_version["name"] - group_node["family"] = prim_family or "" - group_node["familyIcon"] = family_icon + group_node["productType"] = product_type or "" + group_node["productTypeIcon"] = product_type_icon group_node["count"] = len(group_containers) group_node["isGroupNode"] = True group_node["group"] = subset["data"].get("subsetGroup") diff --git a/client/ayon_core/tools/sceneinventory/switch_dialog/folders_input.py b/client/ayon_core/tools/sceneinventory/switch_dialog/folders_input.py index 2358a82a7f..e46c28474f 100644 --- a/client/ayon_core/tools/sceneinventory/switch_dialog/folders_input.py +++ b/client/ayon_core/tools/sceneinventory/switch_dialog/folders_input.py @@ -196,19 +196,19 @@ class FoldersField(BaseClickableFrame): def __init__(self, controller, parent): super(FoldersField, self).__init__(parent) - self.setObjectName("AssetNameInputWidget") + self.setObjectName("FolderPathInputWidget") # Don't use 'self' for parent! 
# - this widget has specific styles dialog = FoldersDialog(controller, parent) name_input = ClickableLineEdit(self) - name_input.setObjectName("AssetNameInput") + name_input.setObjectName("FolderPathInput") icon = qtawesome.icon("fa.window-maximize", color="white") icon_btn = QtWidgets.QPushButton(self) icon_btn.setIcon(icon) - icon_btn.setObjectName("AssetNameInputButton") + icon_btn.setObjectName("FolderPathInputButton") layout = QtWidgets.QHBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) diff --git a/client/ayon_core/tools/sceneinventory/view.py b/client/ayon_core/tools/sceneinventory/view.py index 214be68ae0..80c89338f5 100644 --- a/client/ayon_core/tools/sceneinventory/view.py +++ b/client/ayon_core/tools/sceneinventory/view.py @@ -112,17 +112,17 @@ class SceneInventoryView(QtWidgets.QTreeView): loaded_hero_versions = [] versions_by_parent_id = collections.defaultdict(list) - subset_ids = set() + product_ids = set() for version in loaded_versions: if version["type"] == "hero_version": loaded_hero_versions.append(version) else: parent_id = version["parent"] versions_by_parent_id[parent_id].append(version) - subset_ids.add(parent_id) + product_ids.add(parent_id) all_versions = get_versions( - project_name, subset_ids=subset_ids, hero=True + project_name, subset_ids=product_ids, hero=True ) hero_versions = [] versions = [] diff --git a/client/ayon_core/tools/sceneinventory/window.py b/client/ayon_core/tools/sceneinventory/window.py index f1bea26bb9..9584524edd 100644 --- a/client/ayon_core/tools/sceneinventory/window.py +++ b/client/ayon_core/tools/sceneinventory/window.py @@ -78,7 +78,7 @@ class SceneInventoryWindow(QtWidgets.QDialog): view.setColumnWidth(0, 250) # name view.setColumnWidth(1, 55) # version view.setColumnWidth(2, 55) # count - view.setColumnWidth(3, 150) # family + view.setColumnWidth(3, 150) # product type view.setColumnWidth(4, 120) # group view.setColumnWidth(5, 150) # loader diff --git a/client/ayon_core/tools/subsetmanager/README.md b/client/ayon_core/tools/subsetmanager/README.md index 062214834a..35b80ea114 100644 --- a/client/ayon_core/tools/subsetmanager/README.md +++ b/client/ayon_core/tools/subsetmanager/README.md @@ -14,6 +14,6 @@ Host is expected to implemented: - `list_instances` - returning list of dictionaries (instances), must contain unique uuid field example: - ```[{"uuid":"15","active":true,"subset":"imageBG","family":"image","id":"pyblish.avalon.instance","asset":"Town"}]``` + ```[{"uuid":"15","active":true,"subset":"imageBG","family":"image","id":"ayon.create.instance","asset":"Town"}]``` - `remove_instance(instance)` - removes instance from file's metadata instance is a dictionary, with uuid field \ No newline at end of file diff --git a/client/ayon_core/tools/subsetmanager/model.py b/client/ayon_core/tools/subsetmanager/model.py index 638d096918..4964abd86d 100644 --- a/client/ayon_core/tools/subsetmanager/model.py +++ b/client/ayon_core/tools/subsetmanager/model.py @@ -32,7 +32,11 @@ class InstanceModel(QtGui.QStandardItemModel): items = [] for instance_data in instances: item_id = str(uuid.uuid4()) - label = instance_data.get("label") or instance_data["subset"] + product_name = ( + instance_data.get("productName") + or instance_data.get("subset") + ) + label = instance_data.get("label") or product_name item = QtGui.QStandardItem(label) item.setEnabled(True) item.setEditable(False) diff --git a/client/ayon_core/tools/subsetmanager/window.py b/client/ayon_core/tools/subsetmanager/window.py index 97dab1adb2..164ffa95a7 100644 --- 
a/client/ayon_core/tools/subsetmanager/window.py +++ b/client/ayon_core/tools/subsetmanager/window.py @@ -45,7 +45,7 @@ class SubsetManagerWindow(QtWidgets.QDialog): # Filter input filter_input = PlaceholderLineEdit(header_widget) - filter_input.setPlaceholderText("Filter subsets..") + filter_input.setPlaceholderText("Filter products..") # Refresh button icon = qtawesome.icon("fa.refresh", color="white") diff --git a/client/ayon_core/tools/texture_copy/app.py b/client/ayon_core/tools/texture_copy/app.py index 064f4e5577..120051060b 100644 --- a/client/ayon_core/tools/texture_copy/app.py +++ b/client/ayon_core/tools/texture_copy/app.py @@ -6,7 +6,7 @@ import speedcopy from ayon_core.client import get_project, get_asset_by_name from ayon_core.lib import Terminal -from ayon_core.pipeline import legacy_io, Anatomy +from ayon_core.pipeline import Anatomy t = Terminal() @@ -16,11 +16,6 @@ texture_extensions = ['.tif', '.tiff', '.jpg', '.jpeg', '.tx', '.png', '.tga', class TextureCopy: - - def __init__(self): - if not legacy_io.Session: - legacy_io.install() - def _get_textures(self, path): textures = [] for dir, subdir, files in os.walk(path): @@ -36,14 +31,23 @@ class TextureCopy: if parents and len(parents) > 0: hierarchy = os.path.join(*parents) + product_name = "Main" + product_type = "texture" template_data = { "project": { "name": project_name, "code": project['data']['code'] }, - "asset": asset['name'], - "family": 'texture', - "subset": 'Main', + "asset": asset["name"], + "family": product_type, + "subset": product_name, + "folder": { + "name": asset["name"], + }, + "product": { + "name": product_name, + "type": product_type, + }, "hierarchy": hierarchy } anatomy = Anatomy(project_name) @@ -137,8 +141,8 @@ class TextureCopy: def texture_copy(asset, project, path): t.echo("*** Running Texture tool ***") t.echo(">>> Initializing avalon session ...") - os.environ["AVALON_PROJECT"] = project - os.environ["AVALON_ASSET"] = asset + os.environ["AYON_PROJECT_NAME"] = project + os.environ["AYON_FOLDER_PATH"] = asset TextureCopy().process(asset, project, path) diff --git a/client/ayon_core/tools/tray/__init__.py b/client/ayon_core/tools/tray/__init__.py index f5e558e0bb..49130e660a 100644 --- a/client/ayon_core/tools/tray/__init__.py +++ b/client/ayon_core/tools/tray/__init__.py @@ -1,5 +1,6 @@ from .tray import main + __all__ = ( "main", ) diff --git a/client/ayon_core/tools/tray/dialogs.py b/client/ayon_core/tools/tray/dialogs.py new file mode 100644 index 0000000000..67348284a1 --- /dev/null +++ b/client/ayon_core/tools/tray/dialogs.py @@ -0,0 +1,155 @@ +import os + +from qtpy import QtWidgets, QtCore, QtGui + +from ayon_core import resources, style +from ayon_core.tools.utils import paint_image_with_color + + +class PixmapLabel(QtWidgets.QLabel): + """Label resizing image to height of font.""" + def __init__(self, pixmap, parent): + super(PixmapLabel, self).__init__(parent) + self._empty_pixmap = QtGui.QPixmap(0, 0) + self._source_pixmap = pixmap + + def set_source_pixmap(self, pixmap): + """Change source image.""" + self._source_pixmap = pixmap + self._set_resized_pix() + + def _get_pix_size(self): + size = self.fontMetrics().height() * 3 + return size, size + + def _set_resized_pix(self): + if self._source_pixmap is None: + self.setPixmap(self._empty_pixmap) + return + width, height = self._get_pix_size() + self.setPixmap( + self._source_pixmap.scaled( + width, + height, + QtCore.Qt.KeepAspectRatio, + QtCore.Qt.SmoothTransformation + ) + ) + + def resizeEvent(self, event): + 
self._set_resized_pix() + super(PixmapLabel, self).resizeEvent(event) + + +class UpdateDialog(QtWidgets.QDialog): + restart_requested = QtCore.Signal() + ignore_requested = QtCore.Signal() + + _min_width = 400 + _min_height = 130 + + def __init__(self, parent=None): + super(UpdateDialog, self).__init__(parent) + + icon = QtGui.QIcon(resources.get_ayon_icon_filepath()) + self.setWindowIcon(icon) + self.setWindowTitle("AYON update") + self.setWindowFlags( + self.windowFlags() + | QtCore.Qt.WindowStaysOnTopHint + ) + + self.setMinimumWidth(self._min_width) + self.setMinimumHeight(self._min_height) + + top_widget = QtWidgets.QWidget(self) + + gift_pixmap = self._get_gift_pixmap() + gift_icon_label = PixmapLabel(gift_pixmap, top_widget) + + label_widget = QtWidgets.QLabel( + ( + "Your AYON needs to update." + "
<br/><br/>
Please restart AYON launcher and all running" + " applications as soon as possible." + ), + top_widget + ) + label_widget.setWordWrap(True) + + top_layout = QtWidgets.QHBoxLayout(top_widget) + top_layout.setSpacing(10) + top_layout.addWidget(gift_icon_label, 0, QtCore.Qt.AlignCenter) + top_layout.addWidget(label_widget, 1) + + ignore_btn = QtWidgets.QPushButton("Ignore", self) + restart_btn = QtWidgets.QPushButton("Restart && Change", self) + restart_btn.setObjectName("TrayRestartButton") + + btns_layout = QtWidgets.QHBoxLayout() + btns_layout.addStretch(1) + btns_layout.addWidget(ignore_btn, 0) + btns_layout.addWidget(restart_btn, 0) + + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(top_widget, 0) + layout.addStretch(1) + layout.addLayout(btns_layout, 0) + + ignore_btn.clicked.connect(self._on_ignore) + restart_btn.clicked.connect(self._on_reset) + + self._label_widget = label_widget + self._gift_icon_label = gift_icon_label + self._ignore_btn = ignore_btn + self._restart_btn = restart_btn + + self._restart_accepted = False + self._current_is_higher = False + + self._close_silently = False + + self.setStyleSheet(style.load_stylesheet()) + + def close_silently(self): + self._close_silently = True + self.close() + + def showEvent(self, event): + super(UpdateDialog, self).showEvent(event) + self._close_silently = False + self._restart_accepted = False + + def closeEvent(self, event): + super(UpdateDialog, self).closeEvent(event) + if self._restart_accepted or self._current_is_higher: + return + + if self._close_silently: + return + + # Trigger ignore requested only if restart was not clicked and current + # version is lower + self.ignore_requested.emit() + + def _on_ignore(self): + self.reject() + + def _on_reset(self): + self._restart_accepted = True + self.restart_requested.emit() + self.accept() + + def _get_gift_pixmap(self): + image_path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "images", + "gifts.png" + ) + src_image = QtGui.QImage(image_path) + color_value = style.get_objected_colors("font") + + return paint_image_with_color( + src_image, + color_value.get_qcolor() + ) diff --git a/client/ayon_core/tools/tray/tray.py b/client/ayon_core/tools/tray/tray.py index 3a70d68466..3c6c529be8 100644 --- a/client/ayon_core/tools/tray/tray.py +++ b/client/ayon_core/tools/tray/tray.py @@ -1,10 +1,11 @@ -import collections import os import sys +import collections import atexit import platform +import ayon_api from qtpy import QtCore, QtGui, QtWidgets from ayon_core import resources, style @@ -12,57 +13,25 @@ from ayon_core.lib import ( Logger, get_ayon_launcher_args, run_detached_process, + is_dev_mode_enabled, + is_staging_enabled, + is_running_from_build, ) -from ayon_core.lib import is_running_from_build +from ayon_core.settings import get_studio_settings from ayon_core.addon import ( ITrayAction, ITrayService, TrayAddonsManager, ) -from ayon_core.settings import get_system_settings from ayon_core.tools.utils import ( WrappedCallbackItem, get_ayon_qt_app, ) from .info_widget import InfoWidget - - -# TODO PixmapLabel should be moved to 'utils' in other future PR so should be -# imported from there -class PixmapLabel(QtWidgets.QLabel): - """Label resizing image to height of font.""" - def __init__(self, pixmap, parent): - super(PixmapLabel, self).__init__(parent) - self._empty_pixmap = QtGui.QPixmap(0, 0) - self._source_pixmap = pixmap - - def set_source_pixmap(self, pixmap): - """Change source image.""" - self._source_pixmap = pixmap - self._set_resized_pix() - - 
def _get_pix_size(self): - size = self.fontMetrics().height() * 3 - return size, size - - def _set_resized_pix(self): - if self._source_pixmap is None: - self.setPixmap(self._empty_pixmap) - return - width, height = self._get_pix_size() - self.setPixmap( - self._source_pixmap.scaled( - width, - height, - QtCore.Qt.KeepAspectRatio, - QtCore.Qt.SmoothTransformation - ) - ) - - def resizeEvent(self, event): - self._set_resized_pix() - super(PixmapLabel, self).resizeEvent(event) +from .dialogs import ( + UpdateDialog, +) class TrayManager: @@ -78,22 +47,26 @@ class TrayManager: self.log = Logger.get_logger(self.__class__.__name__) - system_settings = get_system_settings() + studio_settings = get_studio_settings() - version_check_interval = system_settings["general"].get( - "version_check_interval" + update_check_interval = studio_settings["core"].get( + "update_check_interval" ) - if version_check_interval is None: - version_check_interval = 5 - self._version_check_interval = version_check_interval * 60 * 1000 + if update_check_interval is None: + update_check_interval = 5 + self._update_check_interval = update_check_interval * 60 * 1000 self._addons_manager = TrayAddonsManager() self.errors = [] - self.main_thread_timer = None + self._update_check_timer = None + self._outdated_dialog = None + + self._main_thread_timer = None self._main_thread_callbacks = collections.deque() self._execution_in_progress = None + self._closing = False @property def doubleclick_callback(self): @@ -107,29 +80,25 @@ class TrayManager: if callback: self.execute_in_main_thread(callback) - def _restart_and_install(self): - self.restart(use_expected_version=True) + def show_tray_message(self, title, message, icon=None, msecs=None): + """Show tray message. - def execute_in_main_thread(self, callback, *args, **kwargs): - if isinstance(callback, WrappedCallbackItem): - item = callback - else: - item = WrappedCallbackItem(callback, *args, **kwargs) + Args: + title (str): Title of message. + message (str): Content of message. + icon (QSystemTrayIcon.MessageIcon): Message's icon. Default is + Information icon, may differ by Qt version. + msecs (int): Duration of message visibility in milliseconds. + Default is 10000 msecs, may differ by Qt version. 
+ """ + args = [title, message] + kwargs = {} + if icon: + kwargs["icon"] = icon + if msecs: + kwargs["msecs"] = msecs - self._main_thread_callbacks.append(item) - - return item - - def _main_thread_execution(self): - if self._execution_in_progress: - return - self._execution_in_progress = True - for _ in range(len(self._main_thread_callbacks)): - if self._main_thread_callbacks: - item = self._main_thread_callbacks.popleft() - item.execute() - - self._execution_in_progress = False + self.tray_widget.showMessage(*args, **kwargs) def initialize_addons(self): """Add addons to tray.""" @@ -140,7 +109,9 @@ class TrayManager: self.tray_widget.menu.addMenu(admin_submenu) # Add services if they are - services_submenu = ITrayService.services_submenu(self.tray_widget.menu) + services_submenu = ITrayService.services_submenu( + self.tray_widget.menu + ) self.tray_widget.menu.addMenu(services_submenu) # Add separator @@ -165,39 +136,172 @@ class TrayManager: main_thread_timer.timeout.connect(self._main_thread_execution) main_thread_timer.start() - self.main_thread_timer = main_thread_timer + self._main_thread_timer = main_thread_timer + + update_check_timer = QtCore.QTimer() + if self._update_check_interval > 0: + update_check_timer.timeout.connect(self._on_update_check_timer) + update_check_timer.setInterval(self._update_check_interval) + update_check_timer.start() + self._update_check_timer = update_check_timer self.execute_in_main_thread(self._startup_validations) + def restart(self): + """Restart Tray tool. + + First creates new process with same argument and close current tray. + """ + + self._closing = True + + args = get_ayon_launcher_args() + + # Create a copy of sys.argv + additional_args = list(sys.argv) + # Remove first argument from 'sys.argv' + # - when running from code the first argument is 'start.py' + # - when running from build the first argument is executable + additional_args.pop(0) + additional_args = [ + arg + for arg in additional_args + if arg not in {"--use-staging", "--use-dev"} + ] + + if is_dev_mode_enabled(): + additional_args.append("--use-dev") + elif is_staging_enabled(): + additional_args.append("--use-staging") + + args.extend(additional_args) + + envs = dict(os.environ.items()) + for key in { + "AYON_BUNDLE_NAME", + }: + envs.pop(key, None) + + run_detached_process(args, env=envs) + self.exit() + + def exit(self): + self._closing = True + self.tray_widget.exit() + + def on_exit(self): + self._addons_manager.on_exit() + + def execute_in_main_thread(self, callback, *args, **kwargs): + if isinstance(callback, WrappedCallbackItem): + item = callback + else: + item = WrappedCallbackItem(callback, *args, **kwargs) + + self._main_thread_callbacks.append(item) + + return item + + def _on_update_check_timer(self): + try: + bundles = ayon_api.get_bundles() + user = ayon_api.get_user() + # This is a workaround for bug in ayon-python-api + if user.get("code") == 401: + raise Exception("Unauthorized") + except Exception: + self._revalidate_ayon_auth() + if self._closing: + return + + try: + bundles = ayon_api.get_bundles() + except Exception: + return + + if is_dev_mode_enabled(): + return + + bundle_type = ( + "stagingBundle" + if is_staging_enabled() + else "productionBundle" + ) + + expected_bundle = bundles.get(bundle_type) + current_bundle = os.environ.get("AYON_BUNDLE_NAME") + is_expected = expected_bundle == current_bundle + if is_expected or expected_bundle is None: + self._restart_action.setVisible(False) + if ( + self._outdated_dialog is not None + and 
self._outdated_dialog.isVisible() + ): + self._outdated_dialog.close_silently() + return + + self._restart_action.setVisible(True) + + if self._outdated_dialog is None: + self._outdated_dialog = UpdateDialog() + self._outdated_dialog.restart_requested.connect( + self._restart_and_install + ) + self._outdated_dialog.ignore_requested.connect( + self._outdated_bundle_ignored + ) + + self._outdated_dialog.show() + self._outdated_dialog.raise_() + self._outdated_dialog.activateWindow() + + def _revalidate_ayon_auth(self): + result = self._show_ayon_login(restart_on_token_change=False) + if self._closing: + return False + + if not result.new_token: + self.exit() + return False + return True + + def _restart_and_install(self): + self.restart() + + def _outdated_bundle_ignored(self): + self.show_tray_message( + "AYON update ignored", + ( + "Please restart AYON launcher as soon as possible" + " to propagate updates." + ) + ) + + def _main_thread_execution(self): + if self._execution_in_progress: + return + self._execution_in_progress = True + for _ in range(len(self._main_thread_callbacks)): + if self._main_thread_callbacks: + item = self._main_thread_callbacks.popleft() + try: + item.execute() + except BaseException: + self.log.error( + "Main thread execution failed", exc_info=True + ) + + self._execution_in_progress = False + def _startup_validations(self): """Run possible startup validations.""" - pass - - def show_tray_message(self, title, message, icon=None, msecs=None): - """Show tray message. - - Args: - title (str): Title of message. - message (str): Content of message. - icon (QSystemTrayIcon.MessageIcon): Message's icon. Default is - Information icon, may differ by Qt version. - msecs (int): Duration of message visibility in milliseconds. - Default is 10000 msecs, may differ by Qt version. - """ - args = [title, message] - kwargs = {} - if icon: - kwargs["icon"] = icon - if msecs: - kwargs["msecs"] = msecs - - self.tray_widget.showMessage(*args, **kwargs) + # Trigger bundle validation on start + self._update_check_timer.timeout.emit() def _add_version_item(self): login_action = QtWidgets.QAction("Login", self.tray_widget) login_action.triggered.connect(self._on_ayon_login) self.tray_widget.menu.addAction(login_action) - version_string = os.getenv("AYON_VERSION", "AYON Info") version_action = QtWidgets.QAction(version_string, self.tray_widget) @@ -216,16 +320,24 @@ class TrayManager: self._restart_action = restart_action def _on_ayon_login(self): - self.execute_in_main_thread(self._show_ayon_login) + self.execute_in_main_thread( + self._show_ayon_login, + restart_on_token_change=True + ) - def _show_ayon_login(self): + def _show_ayon_login(self, restart_on_token_change): from ayon_common.connection.credentials import change_user_ui result = change_user_ui() if result.shutdown: self.exit() + return result - elif result.restart or result.token_changed: + restart = result.restart + if restart_on_token_change and result.token_changed: + restart = True + + if restart: # Remove environment variables from current connection # - keep develop, staging, headless values for key in { @@ -235,23 +347,13 @@ class TrayManager: }: os.environ.pop(key, None) self.restart() + return result def _on_restart_action(self): - self.restart(use_expected_version=True) + self.restart() - def restart(self, use_expected_version=False, reset_version=False): - """Restart Tray tool. - - First creates new process with same argument and close current tray. 
- - Args: - use_expected_version(bool): OpenPype version is set to expected - version. - reset_version(bool): OpenPype version is cleaned up so igniters - logic will decide which version will be used. - """ + def _restart_ayon(self): args = get_ayon_launcher_args() - envs = dict(os.environ.items()) # Create a copy of sys.argv additional_args = list(sys.argv) @@ -259,35 +361,28 @@ class TrayManager: # - when running from code the first argument is 'start.py' # - when running from build the first argument is executable additional_args.pop(0) + additional_args = [ + arg + for arg in additional_args + if arg not in {"--use-staging", "--use-dev"} + ] - cleanup_additional_args = False - if use_expected_version: - cleanup_additional_args = True - reset_version = True - - # Pop OPENPYPE_VERSION - if reset_version: - cleanup_additional_args = True - envs.pop("OPENPYPE_VERSION", None) - - if cleanup_additional_args: - _additional_args = [] - for arg in additional_args: - if arg == "--use-staging" or arg.startswith("--use-version"): - continue - _additional_args.append(arg) - additional_args = _additional_args + if is_dev_mode_enabled(): + additional_args.append("--use-dev") + elif is_staging_enabled(): + additional_args.append("--use-staging") args.extend(additional_args) + + envs = dict(os.environ.items()) + for key in { + "AYON_BUNDLE_NAME", + }: + envs.pop(key, None) + run_detached_process(args, env=envs) self.exit() - def exit(self): - self.tray_widget.exit() - - def on_exit(self): - self._addons_manager.on_exit() - def _on_version_action(self): if self._info_widget is None: self._info_widget = InfoWidget() diff --git a/client/ayon_core/tools/traypublisher/window.py b/client/ayon_core/tools/traypublisher/window.py index dad314e510..210e77f0fa 100644 --- a/client/ayon_core/tools/traypublisher/window.py +++ b/client/ayon_core/tools/traypublisher/window.py @@ -10,51 +10,47 @@ import platform from qtpy import QtWidgets, QtCore import qtawesome -import appdirs -from ayon_core.lib import JSONSettingRegistry +from ayon_core.lib import AYONSettingsRegistry, is_running_from_build from ayon_core.pipeline import install_host from ayon_core.hosts.traypublisher.api import TrayPublisherHost from ayon_core.tools.publisher.control_qt import QtPublisherController from ayon_core.tools.publisher.window import PublisherWindow from ayon_core.tools.utils import PlaceholderLineEdit, get_ayon_qt_app -from ayon_core.tools.utils.constants import PROJECT_NAME_ROLE -from ayon_core.tools.utils.models import ( - ProjectModel, - ProjectSortFilterProxy +from ayon_core.tools.ayon_utils.models import ProjectsModel +from ayon_core.tools.ayon_utils.widgets import ( + ProjectsQtModel, + ProjectSortFilterProxy, + PROJECT_NAME_ROLE, ) +class TrayPublisherRegistry(AYONSettingsRegistry): + def __init__(self): + super(TrayPublisherRegistry, self).__init__("traypublisher") + + class TrayPublisherController(QtPublisherController): + def __init__(self, *args, **kwargs): + super(TrayPublisherController, self).__init__(*args, **kwargs) + self._projects_model = ProjectsModel(self) + @property def host(self): return self._host - def reset_project_data_cache(self): + def reset_hierarchy_cache(self): + self._hierarchy_model.reset() self._asset_docs_cache.reset() - -class TrayPublisherRegistry(JSONSettingRegistry): - """Class handling OpenPype general settings registry. - - Attributes: - vendor (str): Name used for path construction. - product (str): Additional name used for path construction. 
- - """ - - def __init__(self): - self.vendor = "pypeclub" - self.product = "openpype" - name = "tray_publisher" - path = appdirs.user_data_dir(self.product, self.vendor) - super(TrayPublisherRegistry, self).__init__(name, path) + def get_project_items(self, sender=None): + return self._projects_model.get_project_items(sender) class StandaloneOverlayWidget(QtWidgets.QFrame): project_selected = QtCore.Signal(str) - def __init__(self, publisher_window): + def __init__(self, controller, publisher_window): super(StandaloneOverlayWidget, self).__init__(publisher_window) self.setObjectName("OverlayFrame") @@ -66,7 +62,7 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): header_label = QtWidgets.QLabel("Choose project", content_widget) header_label.setObjectName("ChooseProjectLabel") # Create project models and view - projects_model = ProjectModel() + projects_model = ProjectsQtModel(controller) projects_proxy = ProjectSortFilterProxy() projects_proxy.setSourceModel(projects_model) projects_proxy.setFilterKeyColumn(0) @@ -137,12 +133,11 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): project_name = None if project_name: - index = None - src_index = self._projects_model.find_project(project_name) - if src_index is not None: - index = self._projects_proxy.mapFromSource(src_index) - - if index is not None: + src_index = self._projects_model.get_index_by_project_name( + project_name + ) + index = self._projects_proxy.mapFromSource(src_index) + if index.isValid(): selection_model = self._projects_view.selectionModel() selection_model.select( index, @@ -201,7 +196,7 @@ class TrayPublishWindow(PublisherWindow): self.setWindowFlags(flags) - overlay_widget = StandaloneOverlayWidget(self) + overlay_widget = StandaloneOverlayWidget(controller, self) btns_widget = self._header_extra_widget @@ -248,7 +243,7 @@ class TrayPublishWindow(PublisherWindow): def _on_project_select(self, project_name): # TODO register project specific plugin paths self._controller.save_changes(False) - self._controller.reset_project_data_cache() + self._controller.reset_hierarchy_cache() self.reset() if not self._controller.instances: @@ -265,7 +260,7 @@ def main(): app_instance = get_ayon_qt_app() - if platform.system().lower() == "windows": + if not is_running_from_build() and platform.system().lower() == "windows": import ctypes ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID( u"traypublisher" diff --git a/client/ayon_core/tools/utils/__init__.py b/client/ayon_core/tools/utils/__init__.py index 7be0ea5e9f..445b4d9b97 100644 --- a/client/ayon_core/tools/utils/__init__.py +++ b/client/ayon_core/tools/utils/__init__.py @@ -37,10 +37,6 @@ from .lib import ( get_qt_app, get_ayon_qt_app, get_openpype_qt_app, - get_asset_icon, - get_asset_icon_by_name, - get_asset_icon_name_from_doc, - get_asset_icon_color_from_doc, ) from .models import ( @@ -100,10 +96,6 @@ __all__ = ( "get_qt_app", "get_ayon_qt_app", "get_openpype_qt_app", - "get_asset_icon", - "get_asset_icon_by_name", - "get_asset_icon_name_from_doc", - "get_asset_icon_color_from_doc", "RecursiveSortFilterProxyModel", diff --git a/client/ayon_core/tools/utils/assets_widget.py b/client/ayon_core/tools/utils/assets_widget.py index c05f3de850..7c3fd8d97c 100644 --- a/client/ayon_core/tools/utils/assets_widget.py +++ b/client/ayon_core/tools/utils/assets_widget.py @@ -10,6 +10,7 @@ from ayon_core.client import ( ) from ayon_core.style import ( get_default_tools_icon_color, + get_default_entity_icon_color, ) from ayon_core.tools.flickcharm import FlickCharm @@ -21,7 
+22,7 @@ from .widgets import PlaceholderLineEdit from .models import RecursiveSortFilterProxyModel from .lib import ( DynamicQThread, - get_asset_icon + get_qta_icon_by_name_and_color ) ASSET_ID_ROLE = QtCore.Qt.UserRole + 1 @@ -31,6 +32,59 @@ ASSET_UNDERLINE_COLORS_ROLE = QtCore.Qt.UserRole + 4 ASSET_PATH_ROLE = QtCore.Qt.UserRole + 5 +def _get_default_asset_icon_name(has_children): + if has_children: + return "fa.folder" + return "fa.folder-o" + + +def _get_asset_icon_color_from_doc(asset_doc): + if asset_doc: + return asset_doc["data"].get("color") + return None + + +def _get_asset_icon_name_from_doc(asset_doc): + if asset_doc: + return asset_doc["data"].get("icon") + return None + + +def _get_asset_icon_color(asset_doc): + icon_color = _get_asset_icon_color_from_doc(asset_doc) + if icon_color: + return icon_color + return get_default_entity_icon_color() + + +def _get_asset_icon_name(asset_doc, has_children=True): + icon_name = _get_asset_icon_name_from_doc(asset_doc) + if icon_name: + return icon_name + return _get_default_asset_icon_name(has_children) + + +def get_asset_icon(asset_doc, has_children=False): + """Get asset icon. + + Deprecated: + This function will be removed in future releases. Use on your own + risk. + + Args: + asset_doc (dict): Asset document. + has_children (Optional[bool]): Asset has children assets. + + Returns: + QIcon: Asset icon. + + """ + icon_name = _get_asset_icon_name(asset_doc, has_children) + icon_color = _get_asset_icon_color(asset_doc) + + return get_qta_icon_by_name_and_color(icon_name, icon_color) + + class _AssetsView(TreeViewSpinner, DeselectableTreeView): """Asset items view. @@ -111,7 +165,6 @@ class _AssetModel(QtGui.QStandardItemModel): 'refreshed' signal. Args: - dbcon (AvalonMongoDB): Ready to use connection to mongo with. parent (QObject): Parent Qt object. """ @@ -128,9 +181,8 @@ class _AssetModel(QtGui.QStandardItemModel): "data.color": 1 } - def __init__(self, dbcon, parent=None): + def __init__(self, parent=None): super(_AssetModel, self).__init__(parent=parent) - self.dbcon = dbcon self._refreshing = False self._doc_fetching_thread = None @@ -142,6 +194,7 @@ class _AssetModel(QtGui.QStandardItemModel): self._item_ids_with_color = set() self._items_by_asset_id = {} + self._project_name = None self._last_project_name = None @property @@ -185,6 +238,16 @@ class _AssetModel(QtGui.QStandardItemModel): return self.get_indexes_by_asset_ids(asset_ids) + def get_project_name(self): + return self._project_name + + def set_project_name(self, project_name, refresh): + if self._project_name == project_name: + return + self._project_name = project_name + if refresh: + self.refresh() + def refresh(self, force=False): """Refresh the data for the model. 
@@ -197,7 +260,7 @@ class _AssetModel(QtGui.QStandardItemModel): return self.stop_refresh() - project_name = self.dbcon.Session.get("AVALON_PROJECT") + project_name = self._project_name clear_model = False if project_name != self._last_project_name: clear_model = True @@ -216,23 +279,6 @@ class _AssetModel(QtGui.QStandardItemModel): def stop_refresh(self): self._stop_fetch_thread() - def clear_underlines(self): - for asset_id in set(self._item_ids_with_color): - self._item_ids_with_color.remove(asset_id) - item = self._items_by_asset_id.get(asset_id) - if item is not None: - item.setData(None, ASSET_UNDERLINE_COLORS_ROLE) - - def set_underline_colors(self, colors_by_asset_id): - self.clear_underlines() - - for asset_id, colors in colors_by_asset_id.items(): - item = self._items_by_asset_id.get(asset_id) - if item is None: - continue - item.setData(colors, ASSET_UNDERLINE_COLORS_ROLE) - self._item_ids_with_color.add(asset_id) - def _clear_items(self): root_item = self.invisibleRootItem() root_item.removeRows(0, root_item.rowCount()) @@ -357,7 +403,7 @@ class _AssetModel(QtGui.QStandardItemModel): self._doc_fetched.emit() def _fetch_asset_docs(self): - project_name = self.dbcon.current_project() + project_name = self.get_project_name() if not project_name: return [] @@ -392,7 +438,6 @@ class _AssetsWidget(QtWidgets.QWidget): inheritance changes. Args: - dbcon (AvalonMongoDB): Connection to avalon mongo db. parent (QWidget): Parent Qt widget. """ @@ -404,11 +449,9 @@ class _AssetsWidget(QtWidgets.QWidget): # It was double clicked on view double_clicked = QtCore.Signal() - def __init__(self, dbcon, parent=None): + def __init__(self, parent=None): super(_AssetsWidget, self).__init__(parent=parent) - self.dbcon = dbcon - # Tree View model = self._create_source_model() proxy = self._create_proxy_model(model) @@ -477,18 +520,28 @@ class _AssetsWidget(QtWidgets.QWidget): self._model = model self._proxy = proxy self._view = view - self._last_project_name = None self._last_btns_height = None + self._current_asset_name = None + self.model_selection = {} @property def header_widget(self): return self._header_widget + def get_project_name(self): + self._model.get_project_name() + + def set_project_name(self, project_name, refresh=True): + self._model.set_project_name(project_name, refresh) + + def set_current_asset_name(self, asset_name): + self._current_asset_name = asset_name + def _create_source_model(self): - model = _AssetModel(dbcon=self.dbcon, parent=self) + model = _AssetModel(parent=self) model.refreshed.connect(self._on_model_refresh) return model @@ -509,8 +562,8 @@ class _AssetsWidget(QtWidgets.QWidget): def stop_refresh(self): self._model.stop_refresh() - def _get_current_session_asset(self): - return self.dbcon.Session.get("AVALON_ASSET") + def _get_current_asset_name(self): + return self._current_asset_name def _on_current_asset_click(self): """Trigger change of asset to current context asset. @@ -518,10 +571,10 @@ class _AssetsWidget(QtWidgets.QWidget): in differnt way. 
""" - self.set_current_session_asset() + self.select_current_asset() - def set_current_session_asset(self): - asset_name = self._get_current_session_asset() + def select_current_asset(self): + asset_name = self._get_current_asset_name() if asset_name: self.select_asset_by_name(asset_name) diff --git a/client/ayon_core/tools/utils/lib.py b/client/ayon_core/tools/utils/lib.py index b7edd6be71..e785cec390 100644 --- a/client/ayon_core/tools/utils/lib.py +++ b/client/ayon_core/tools/utils/lib.py @@ -234,62 +234,6 @@ def get_qta_icon_by_name_and_color(icon_name, icon_color): return icon -def get_asset_icon_name(asset_doc, has_children=True): - icon_name = get_asset_icon_name_from_doc(asset_doc) - if icon_name: - return icon_name - return get_default_asset_icon_name(has_children) - - -def get_asset_icon_color(asset_doc): - icon_color = get_asset_icon_color_from_doc(asset_doc) - if icon_color: - return icon_color - return get_default_entity_icon_color() - - -def get_default_asset_icon_name(has_children): - if has_children: - return "fa.folder" - return "fa.folder-o" - - -def get_asset_icon_name_from_doc(asset_doc): - if asset_doc: - return asset_doc["data"].get("icon") - return None - - -def get_asset_icon_color_from_doc(asset_doc): - if asset_doc: - return asset_doc["data"].get("color") - return None - - -def get_asset_icon_by_name(icon_name, icon_color, has_children=False): - if not icon_name: - icon_name = get_default_asset_icon_name(has_children) - - if icon_color: - icon_color = QtGui.QColor(icon_color) - else: - icon_color = get_default_entity_icon_color() - icon = get_qta_icon_by_name_and_color(icon_name, icon_color) - if icon is not None: - return icon - return get_qta_icon_by_name_and_color( - get_default_asset_icon_name(has_children), - icon_color - ) - - -def get_asset_icon(asset_doc, has_children=False): - icon_name = get_asset_icon_name(asset_doc, has_children) - icon_color = get_asset_icon_color(asset_doc) - - return get_qta_icon_by_name_and_color(icon_name, icon_color) - - def get_default_task_icon(color=None): if color is None: color = get_default_entity_icon_color() diff --git a/client/ayon_core/tools/utils/models.py b/client/ayon_core/tools/utils/models.py index e60d85b4e4..a4b6ad7885 100644 --- a/client/ayon_core/tools/utils/models.py +++ b/client/ayon_core/tools/utils/models.py @@ -243,160 +243,3 @@ class RecursiveSortFilterProxyModel(QtCore.QSortFilterProxyModel): return super(RecursiveSortFilterProxyModel, self).filterAcceptsRow( row, parent_index ) - - -# TODO remove 'ProjectModel' and 'ProjectSortFilterProxy' classes -# - replace their usage with current 'ayon_utils' models -class ProjectModel(QtGui.QStandardItemModel): - def __init__( - self, only_active=True, add_default_project=False, *args, **kwargs - ): - super(ProjectModel, self).__init__(*args, **kwargs) - - self._only_active = only_active - self._add_default_project = add_default_project - - self._default_item = None - self._items_by_name = {} - self._refreshed = False - - def set_default_project_available(self, available=True): - if available is None: - available = not self._add_default_project - - if self._add_default_project == available: - return - - self._add_default_project = available - if not available and self._default_item is not None: - root_item = self.invisibleRootItem() - root_item.removeRow(self._default_item.row()) - self._default_item = None - - def set_only_active(self, only_active=True): - if only_active is None: - only_active = not self._only_active - - if self._only_active == only_active: - 
return - - self._only_active = only_active - - if self._refreshed: - self.refresh() - - def project_name_is_available(self, project_name): - """Check availability of project name in current items.""" - return project_name in self._items_by_name - - def refresh(self): - # Change '_refreshed' state - self._refreshed = True - new_items = [] - # Add default item to model if should - if self._add_default_project and self._default_item is None: - item = QtGui.QStandardItem(DEFAULT_PROJECT_LABEL) - item.setData(None, PROJECT_NAME_ROLE) - item.setData(True, PROJECT_IS_ACTIVE_ROLE) - new_items.append(item) - self._default_item = item - - project_names = set() - project_docs = get_projects( - inactive=not self._only_active, - fields=["name", "data.active"] - ) - for project_doc in project_docs: - project_name = project_doc["name"] - project_names.add(project_name) - if project_name in self._items_by_name: - item = self._items_by_name[project_name] - else: - item = QtGui.QStandardItem(project_name) - - self._items_by_name[project_name] = item - new_items.append(item) - - is_active = project_doc.get("data", {}).get("active", True) - item.setData(project_name, PROJECT_NAME_ROLE) - item.setData(is_active, PROJECT_IS_ACTIVE_ROLE) - - if not is_active: - font = item.font() - font.setItalic(True) - item.setFont(font) - - root_item = self.invisibleRootItem() - for project_name in tuple(self._items_by_name.keys()): - if project_name not in project_names: - item = self._items_by_name.pop(project_name) - root_item.removeRow(item.row()) - - if new_items: - root_item.appendRows(new_items) - - def find_project(self, project_name): - """ - Get index of 'project_name' value. - - Args: - project_name (str): - Returns: - (QModelIndex) - """ - val = self._items_by_name.get(project_name) - if val: - return self.indexFromItem(val) - - -class ProjectSortFilterProxy(QtCore.QSortFilterProxyModel): - def __init__(self, *args, **kwargs): - super(ProjectSortFilterProxy, self).__init__(*args, **kwargs) - self._filter_enabled = True - # Disable case sensitivity - self.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive) - - def lessThan(self, left_index, right_index): - if left_index.data(PROJECT_NAME_ROLE) is None: - return True - - if right_index.data(PROJECT_NAME_ROLE) is None: - return False - - left_is_active = left_index.data(PROJECT_IS_ACTIVE_ROLE) - right_is_active = right_index.data(PROJECT_IS_ACTIVE_ROLE) - if right_is_active == left_is_active: - return super(ProjectSortFilterProxy, self).lessThan( - left_index, right_index - ) - - if left_is_active: - return True - return False - - def filterAcceptsRow(self, source_row, source_parent): - index = self.sourceModel().index(source_row, 0, source_parent) - string_pattern = self.filterRegularExpression().pattern() - if self._filter_enabled: - result = self._custom_index_filter(index) - if result is not None: - project_name = index.data(PROJECT_NAME_ROLE) - if project_name is None: - return result - return string_pattern.lower() in project_name.lower() - - return super(ProjectSortFilterProxy, self).filterAcceptsRow( - source_row, source_parent - ) - - def _custom_index_filter(self, index): - is_active = bool(index.data(PROJECT_IS_ACTIVE_ROLE)) - - return is_active - - def is_filter_enabled(self): - return self._filter_enabled - - def set_filter_enabled(self, value): - self._filter_enabled = value - self.invalidateFilter() diff --git a/client/ayon_core/tools/utils/tasks_widget.py b/client/ayon_core/tools/utils/tasks_widget.py deleted file mode 100644 index 
12e074f910..0000000000 --- a/client/ayon_core/tools/utils/tasks_widget.py +++ /dev/null @@ -1,303 +0,0 @@ -from qtpy import QtWidgets, QtCore, QtGui -import qtawesome - -from ayon_core.client import ( - get_project, - get_asset_by_id, -) -from ayon_core.style import get_disabled_entity_icon_color -from ayon_core.tools.utils.lib import get_task_icon - -from .views import DeselectableTreeView - - -TASK_NAME_ROLE = QtCore.Qt.UserRole + 1 -TASK_TYPE_ROLE = QtCore.Qt.UserRole + 2 -TASK_ORDER_ROLE = QtCore.Qt.UserRole + 3 -TASK_ASSIGNEE_ROLE = QtCore.Qt.UserRole + 4 - - -class _TasksModel(QtGui.QStandardItemModel): - """A model listing the tasks combined for a list of assets""" - - def __init__(self, dbcon, parent=None): - super(_TasksModel, self).__init__(parent=parent) - self.dbcon = dbcon - self.setHeaderData( - 0, QtCore.Qt.Horizontal, "Tasks", QtCore.Qt.DisplayRole - ) - - self._no_tasks_icon = qtawesome.icon( - "fa.exclamation-circle", - color=get_disabled_entity_icon_color() - ) - self._cached_icons = {} - self._project_doc = {} - - self._empty_tasks_item = None - self._last_asset_id = None - self._loaded_project_name = None - - def _context_is_valid(self): - if self._get_current_project(): - return True - return False - - def refresh(self): - self._refresh_project_doc() - self.set_asset_id(self._last_asset_id) - - def _refresh_project_doc(self): - # Get the project configured icons from database - project_doc = {} - if self._context_is_valid(): - project_name = self.dbcon.active_project() - project_doc = get_project(project_name) - - self._loaded_project_name = self._get_current_project() - self._project_doc = project_doc - - def headerData(self, section, orientation, role=None): - if role is None: - role = QtCore.Qt.EditRole - # Show nice labels in the header - if section == 0: - if ( - role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole) - and orientation == QtCore.Qt.Horizontal - ): - return "Tasks" - - return super(_TasksModel, self).headerData(section, orientation, role) - - def _get_current_project(self): - return self.dbcon.Session.get("AVALON_PROJECT") - - def set_asset_id(self, asset_id): - asset_doc = None - if asset_id and self._context_is_valid(): - project_name = self._get_current_project() - asset_doc = get_asset_by_id( - project_name, asset_id, fields=["data.tasks"] - ) - self._set_asset(asset_doc) - - def _get_empty_task_item(self): - if self._empty_tasks_item is None: - item = QtGui.QStandardItem("No task") - item.setData(self._no_tasks_icon, QtCore.Qt.DecorationRole) - item.setFlags(QtCore.Qt.NoItemFlags) - self._empty_tasks_item = item - return self._empty_tasks_item - - def _set_asset(self, asset_doc): - """Set assets to track by their database id - - Arguments: - asset_doc (dict): Asset document from MongoDB. 
- """ - if self._loaded_project_name != self._get_current_project(): - self._refresh_project_doc() - - asset_tasks = {} - self._last_asset_id = None - if asset_doc: - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - self._last_asset_id = asset_doc["_id"] - - root_item = self.invisibleRootItem() - root_item.removeRows(0, root_item.rowCount()) - - items = [] - - for task_name, task_info in asset_tasks.items(): - task_type = task_info.get("type") - task_order = task_info.get("order") - icon = get_task_icon(self._project_doc, asset_doc, task_name) - - task_assignees = set() - assignees_data = task_info.get("assignees") or [] - for assignee in assignees_data: - username = assignee.get("username") - if username: - task_assignees.add(username) - - label = "{} ({})".format(task_name, task_type or "type N/A") - item = QtGui.QStandardItem(label) - item.setData(task_name, TASK_NAME_ROLE) - item.setData(task_type, TASK_TYPE_ROLE) - item.setData(task_order, TASK_ORDER_ROLE) - item.setData(task_assignees, TASK_ASSIGNEE_ROLE) - item.setData(icon, QtCore.Qt.DecorationRole) - item.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable) - items.append(item) - - if not items: - item = QtGui.QStandardItem("No task") - item.setData(self._no_tasks_icon, QtCore.Qt.DecorationRole) - item.setFlags(QtCore.Qt.NoItemFlags) - items.append(item) - - root_item.appendRows(items) - - -class _TasksProxyModel(QtCore.QSortFilterProxyModel): - def lessThan(self, x_index, y_index): - x_order = x_index.data(TASK_ORDER_ROLE) - y_order = y_index.data(TASK_ORDER_ROLE) - if x_order is not None and y_order is not None: - if x_order < y_order: - return True - if x_order > y_order: - return False - - elif x_order is None and y_order is not None: - return True - - elif y_order is None and x_order is not None: - return False - - x_name = x_index.data(QtCore.Qt.DisplayRole) - y_name = y_index.data(QtCore.Qt.DisplayRole) - if x_name == y_name: - return True - - if x_name == tuple(sorted((x_name, y_name)))[0]: - return True - return False - - -class TasksWidget(QtWidgets.QWidget): - """Widget showing active Tasks - - Deprecated: - This widget will be removed soon. Please do not use it in new code. - """ - - task_changed = QtCore.Signal() - - def __init__(self, dbcon, parent=None): - self._dbcon = dbcon - - super(TasksWidget, self).__init__(parent) - - tasks_view = DeselectableTreeView(self) - tasks_view.setIndentation(0) - tasks_view.setSortingEnabled(True) - tasks_view.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers) - - header_view = tasks_view.header() - header_view.setSortIndicator(0, QtCore.Qt.AscendingOrder) - - tasks_model = self._create_source_model() - tasks_proxy = self._create_proxy_model(tasks_model) - tasks_view.setModel(tasks_proxy) - - layout = QtWidgets.QVBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(tasks_view) - - selection_model = tasks_view.selectionModel() - selection_model.selectionChanged.connect(self._on_task_change) - - self._tasks_model = tasks_model - self._tasks_proxy = tasks_proxy - self._tasks_view = tasks_view - - self._last_selected_task_name = None - - def _create_source_model(self): - """Create source model of tasks widget. - - Model must have available 'refresh' method and 'set_asset_id' to change - context of asset. 
- """ - return _TasksModel(self._dbcon) - - def _create_proxy_model(self, source_model): - proxy = _TasksProxyModel() - proxy.setSourceModel(source_model) - return proxy - - def refresh(self): - self._tasks_model.refresh() - - def set_asset_id(self, asset_id): - # Try and preserve the last selected task and reselect it - # after switching assets. If there's no currently selected - # asset keep whatever the "last selected" was prior to it. - current = self.get_selected_task_name() - if current: - self._last_selected_task_name = current - - self._tasks_model.set_asset_id(asset_id) - - if self._last_selected_task_name: - self.select_task_name(self._last_selected_task_name) - - # Force a task changed emit. - self.task_changed.emit() - - def _clear_selection(self): - selection_model = self._tasks_view.selectionModel() - selection_model.clearSelection() - - def select_task_name(self, task_name): - """Select a task by name. - - If the task does not exist in the current model then selection is only - cleared. - - Args: - task (str): Name of the task to select. - - """ - task_view_model = self._tasks_view.model() - if not task_view_model: - return - - # Clear selection - selection_model = self._tasks_view.selectionModel() - selection_model.clearSelection() - - # Select the task - mode = ( - QtCore.QItemSelectionModel.Select - | QtCore.QItemSelectionModel.Rows - ) - for row in range(task_view_model.rowCount()): - index = task_view_model.index(row, 0) - name = index.data(TASK_NAME_ROLE) - if name == task_name: - selection_model.select(index, mode) - - # Set the currently active index - self._tasks_view.setCurrentIndex(index) - break - - last_selected_task_name = self.get_selected_task_name() - if last_selected_task_name: - self._last_selected_task_name = last_selected_task_name - - def get_selected_task_name(self): - """Return name of task at current index (selected) - - Returns: - str: Name of the current task. - - """ - index = self._tasks_view.currentIndex() - selection_model = self._tasks_view.selectionModel() - if index.isValid() and selection_model.isSelected(index): - return index.data(TASK_NAME_ROLE) - return None - - def get_selected_task_type(self): - index = self._tasks_view.currentIndex() - selection_model = self._tasks_view.selectionModel() - if index.isValid() and selection_model.isSelected(index): - return index.data(TASK_TYPE_ROLE) - return None - - def _on_task_change(self): - self.task_changed.emit() diff --git a/client/ayon_core/tools/workfile_template_build/__init__.py b/client/ayon_core/tools/workfile_template_build/__init__.py index 82a22aea50..ad94ebcf79 100644 --- a/client/ayon_core/tools/workfile_template_build/__init__.py +++ b/client/ayon_core/tools/workfile_template_build/__init__.py @@ -1,5 +1,8 @@ from .window import WorkfileBuildPlaceholderDialog +from .lib import open_template_ui __all__ = ( "WorkfileBuildPlaceholderDialog", + + "open_template_ui" ) diff --git a/client/ayon_core/tools/workfile_template_build/lib.py b/client/ayon_core/tools/workfile_template_build/lib.py new file mode 100644 index 0000000000..de3a0d0084 --- /dev/null +++ b/client/ayon_core/tools/workfile_template_build/lib.py @@ -0,0 +1,28 @@ +import traceback + +from qtpy import QtWidgets + +from ayon_core.tools.utils.dialogs import show_message_dialog + + +def open_template_ui(builder, main_window): + """Open template from `builder` + + Asks user about overwriting current scene and feedsback exceptions. + """ + result = QtWidgets.QMessageBox.question( + main_window, + "Opening template", + "Caution! 
You will lose unsaved changes.\nDo you want to continue?", + QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No + ) + if result == QtWidgets.QMessageBox.Yes: + try: + builder.open_template() + except Exception: + show_message_dialog( + title="Template Load Failed", + message="".join(traceback.format_exc()), + parent=main_window, + level="critical" + ) diff --git a/client/ayon_core/tools/workfile_template_build/window.py b/client/ayon_core/tools/workfile_template_build/window.py index 7f95bac60a..feb11c5e75 100644 --- a/client/ayon_core/tools/workfile_template_build/window.py +++ b/client/ayon_core/tools/workfile_template_build/window.py @@ -1,8 +1,9 @@ +import os + from qtpy import QtWidgets from ayon_core import style from ayon_core.lib import Logger -from ayon_core.pipeline import legacy_io from ayon_core.tools.attribute_defs import AttributeDefinitionsWidget @@ -26,7 +27,7 @@ class WorkfileBuildPlaceholderDialog(QtWidgets.QDialog): host_name = getattr(self._host, "name", None) if not host_name: - host_name = legacy_io.Session.get("AVALON_APP") or "NA" + host_name = os.getenv("AYON_HOST_NAME") or "NA" self._host_name = host_name plugins_combo = QtWidgets.QComboBox(self) diff --git a/client/ayon_core/tools/workfiles/control.py b/client/ayon_core/tools/workfiles/control.py index c1e513d12c..86c6a62a11 100644 --- a/client/ayon_core/tools/workfiles/control.py +++ b/client/ayon_core/tools/workfiles/control.py @@ -156,7 +156,7 @@ class BaseWorkfileController( self._log = None self._current_project_name = None - self._current_folder_name = None + self._current_folder_path = None self._current_folder_id = None self._current_task_name = None self._save_is_enabled = True @@ -468,12 +468,12 @@ class BaseWorkfileController( context = self._get_host_current_context() project_name = context["project_name"] - folder_name = context["asset_name"] + folder_path = context["folder_path"] task_name = context["task_name"] current_file = self.get_current_workfile() folder_id = None - if folder_name: - folder = ayon_api.get_folder_by_path(project_name, folder_name) + if folder_path: + folder = ayon_api.get_folder_by_path(project_name, folder_path) if folder: folder_id = folder["id"] @@ -481,7 +481,7 @@ class BaseWorkfileController( self._project_anatomy = None self._current_project_name = project_name - self._current_folder_name = folder_name + self._current_folder_path = folder_path self._current_folder_id = folder_id self._current_task_name = task_name @@ -639,6 +639,7 @@ class BaseWorkfileController( return { "project_name": project_name, "folder_id": folder_id, + "folder_path": folder["path"], "asset_id": folder_id, "asset_name": folder["name"], "task_id": task_id, diff --git a/client/ayon_core/tools/workfiles/models/workfiles.py b/client/ayon_core/tools/workfiles/models/workfiles.py index 55653e34d4..1e9491b3d7 100644 --- a/client/ayon_core/tools/workfiles/models/workfiles.py +++ b/client/ayon_core/tools/workfiles/models/workfiles.py @@ -246,7 +246,7 @@ class WorkareaModel: self._controller.get_host_name(), task_name=task_info.get("name"), task_type=task_info.get("type"), - family="workfile", + product_type="workfile", project_settings=self._controller.project_settings, ) else: @@ -630,7 +630,7 @@ class PublishWorkfilesModel: if not product_ids: return output - # Get version docs of subsets with their families + # Get version docs of products with their families version_entities = ayon_api.get_versions( project_name, product_ids=product_ids, diff --git 
a/client/ayon_core/tools/workfiles/widgets/save_as_dialog.py b/client/ayon_core/tools/workfiles/widgets/save_as_dialog.py index 2ed2dd0659..77dac1198a 100644 --- a/client/ayon_core/tools/workfiles/widgets/save_as_dialog.py +++ b/client/ayon_core/tools/workfiles/widgets/save_as_dialog.py @@ -49,7 +49,7 @@ class SubversionLineEdit(QtWidgets.QWidget): self._input_field.setText(action.text()) def _update(self, values): - """Create optional predefined subset names + """Create optional predefined product names Args: default_names(list): all predefined names diff --git a/client/ayon_core/version.py b/client/ayon_core/version.py index 914e415b8c..f3ad9713d5 100644 --- a/client/ayon_core/version.py +++ b/client/ayon_core/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring AYON core addon version.""" -__version__ = "0.2.1-dev.1" +__version__ = "0.3.0-dev.1" diff --git a/client/pyproject.toml b/client/pyproject.toml index c21ca305a7..7b4329a31a 100644 --- a/client/pyproject.toml +++ b/client/pyproject.toml @@ -10,8 +10,6 @@ wsrpc_aiohttp = "^3.1.1" # websocket server Click = "^8" clique = "1.6.*" jsonschema = "^2.6.0" -pymongo = "^3.11.2" -log4mongo = "^1.7" pyblish-base = "^1.8.11" pynput = "^1.7.2" # Timers manager - TODO remove speedcopy = "^2.1" diff --git a/create_package.py b/create_package.py index 94b31a03f2..48952c43c5 100644 --- a/create_package.py +++ b/create_package.py @@ -279,7 +279,8 @@ def create_server_package( def main( output_dir: Optional[str]=None, skip_zip: bool=False, - keep_sources: bool=False + keep_sources: bool=False, + clear_output_dir: bool=False ): log = logging.getLogger("create_package") log.info("Start creating package") @@ -292,7 +293,8 @@ def main( new_created_version_dir = os.path.join( output_dir, ADDON_NAME, ADDON_VERSION ) - if os.path.isdir(new_created_version_dir): + + if os.path.isdir(new_created_version_dir) and clear_output_dir: log.info(f"Purging {new_created_version_dir}") shutil.rmtree(output_dir) @@ -339,6 +341,15 @@ if __name__ == "__main__": "Keep folder structure when server package is created." ) ) + parser.add_argument( + "-c", "--clear-output-dir", + dest="clear_output_dir", + action="store_true", + help=( + "Clear output directory before package creation." 
+ ) + ) + parser.add_argument( "-o", "--output", dest="output_dir", @@ -350,4 +361,9 @@ if __name__ == "__main__": ) args = parser.parse_args(sys.argv[1:]) - main(args.output_dir, args.skip_zip, args.keep_sources) + main( + args.output_dir, + args.skip_zip, + args.keep_sources, + args.clear_output_dir + ) diff --git a/package.py b/package.py index d7266d852b..470bbf256b 100644 --- a/package.py +++ b/package.py @@ -1,8 +1,11 @@ name = "core" title = "Core" -version = "0.2.1-dev.1" +version = "0.3.0-dev.1" client_dir = "ayon_core" plugin_for = ["ayon_server"] -ayon_version = ">=1.0.3,<2.0.0" +requires = [ + "~ayon_server-1.0.3+<2.0.0", +] + diff --git a/server/settings/main.py b/server/settings/main.py index 1bdfcefe19..28a69e182d 100644 --- a/server/settings/main.py +++ b/server/settings/main.py @@ -121,6 +121,11 @@ class CoreSettings(BaseSettingsModel): widget="textarea", scope=["studio"], ) + update_check_interval: int = SettingsField( + 5, + title="Update check interval (minutes)", + ge=0 + ) disk_mapping: DiskMappingModel = SettingsField( default_factory=DiskMappingModel, title="Disk mapping", diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index 7aa86aafa6..9b5f3ae571 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -9,7 +9,7 @@ from ayon_server.settings import ( task_types_enum, ) -from ayon_server.types import ColorRGBA_uint8 +from ayon_server.types import ColorRGB_uint8, ColorRGBA_uint8 class ValidateBaseModel(BaseSettingsModel): @@ -176,6 +176,10 @@ class ExtractThumbnailOIIODefaultsModel(BaseSettingsModel): class ExtractThumbnailModel(BaseSettingsModel): _isGroup = True enabled: bool = SettingsField(True) + product_names: list[str] = SettingsField( + default_factory=list, + title="Product names" + ) integrate_thumbnail: bool = SettingsField( True, title="Integrate Thumbnail Representation" @@ -391,6 +395,14 @@ class ExtractReviewOutputDefModel(BaseSettingsModel): " same as output aspect ratio." ) ) + # overscan_color: ColorRGB_uint8 = SettingsField( + # (0, 0, 0), + # title="Overscan color", + # description=( + # "Overscan color is used when input aspect ratio is not" + # " same as output aspect ratio." 
+ # ) + # ) width: int = SettingsField( 0, ge=0, @@ -844,6 +856,7 @@ DEFAULT_PUBLISH_VALUES = { }, "ExtractThumbnail": { "enabled": True, + "product_names": [], "integrate_thumbnail": True, "target_size": { "type": "source" @@ -896,7 +909,8 @@ DEFAULT_PUBLISH_VALUES = { "single_frame_filter": "single_frame" }, "overscan_crop": "", - "overscan_color": [0, 0, 0, 1.0], + # "overscan_color": [0, 0, 0], + "overscan_color": [0, 0, 0, 0.0], "width": 1920, "height": 1080, "scale_pixel_aspect": True, @@ -941,7 +955,8 @@ DEFAULT_PUBLISH_VALUES = { "single_frame_filter": "multi_frame" }, "overscan_crop": "", - "overscan_color": [0, 0, 0, 1.0], + # "overscan_color": [0, 0, 0], + "overscan_color": [0, 0, 0, 0.0], "width": 0, "height": 0, "scale_pixel_aspect": True, diff --git a/server_addon/applications/server/__init__.py b/server_addon/applications/server/__init__.py index e782e8a591..d5c2de3df3 100644 --- a/server_addon/applications/server/__init__.py +++ b/server_addon/applications/server/__init__.py @@ -92,8 +92,9 @@ class ApplicationsAddon(BaseServerAddon): settings_model = ApplicationsAddonSettings async def get_default_settings(self): - applications_path = os.path.join(self.addon_dir, "applications.json") - tools_path = os.path.join(self.addon_dir, "tools.json") + server_dir = os.path.join(self.addon_dir, "server") + applications_path = os.path.join(server_dir, "applications.json") + tools_path = os.path.join(server_dir, "tools.json") default_values = copy.deepcopy(DEFAULT_VALUES) with open(applications_path, "r") as stream: default_values.update(json.load(stream)) diff --git a/server_addon/applications/server/applications.json b/server_addon/applications/server/applications.json index a2fbbac83f..b72d117225 100644 --- a/server_addon/applications/server/applications.json +++ b/server_addon/applications/server/applications.json @@ -1201,6 +1201,30 @@ } ] }, + "openrv": { + "enabled": true, + "label": "OpenRV", + "icon": "{}/app_icons/openrv.png", + "host_name": "openrv", + "environment": "{\n \n}", + "variants": [ + { + "name": "1.0.0", + "use_python_2": false, + "executables": { + "windows": [], + "darwin": [], + "linux": [] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": "{\n \n}" + } + ] + }, "additional_apps": [] } } diff --git a/server_addon/applications/server/settings.py b/server_addon/applications/server/settings.py index e0a59604c8..a49175d488 100644 --- a/server_addon/applications/server/settings.py +++ b/server_addon/applications/server/settings.py @@ -186,6 +186,8 @@ class ApplicationsSettings(BaseSettingsModel): default_factory=AppGroupWithPython, title="Unreal Editor") wrap: AppGroup = SettingsField( default_factory=AppGroupWithPython, title="Wrap") + openrv: AppGroup = SettingsField( + default_factory=AppGroupWithPython, title="OpenRV") additional_apps: list[AdditionalAppGroup] = SettingsField( default_factory=list, title="Additional Applications") diff --git a/server_addon/applications/server/version.py b/server_addon/applications/server/version.py index 1276d0254f..0a8da88258 100644 --- a/server_addon/applications/server/version.py +++ b/server_addon/applications/server/version.py @@ -1 +1 @@ -__version__ = "0.1.5" +__version__ = "0.1.6" diff --git a/server_addon/blender/server/settings/render_settings.py b/server_addon/blender/server/settings/render_settings.py index f91ba1627a..f992ea6fcc 100644 --- a/server_addon/blender/server/settings/render_settings.py +++ b/server_addon/blender/server/settings/render_settings.py @@ -23,6 +23,13 @@ 
def image_format_enum(): ] +def renderers_enum(): + return [ + {"value": "CYCLES", "label": "Cycles"}, + {"value": "BLENDER_EEVEE", "label": "Eevee"}, + ] + + def aov_list_enum(): return [ {"value": "empty", "label": "< none >"}, @@ -30,18 +37,52 @@ def aov_list_enum(): {"value": "z", "label": "Z"}, {"value": "mist", "label": "Mist"}, {"value": "normal", "label": "Normal"}, - {"value": "diffuse_light", "label": "Diffuse Light"}, + {"value": "position", "label": "Position (Cycles Only)"}, + {"value": "vector", "label": "Vector (Cycles Only)"}, + {"value": "uv", "label": "UV (Cycles Only)"}, + {"value": "denoising", "label": "Denoising Data (Cycles Only)"}, + {"value": "object_index", "label": "Object Index (Cycles Only)"}, + {"value": "material_index", "label": "Material Index (Cycles Only)"}, + {"value": "sample_count", "label": "Sample Count (Cycles Only)"}, + {"value": "diffuse_light", "label": "Diffuse Light/Direct"}, + { + "value": "diffuse_indirect", + "label": "Diffuse Indirect (Cycles Only)" + }, {"value": "diffuse_color", "label": "Diffuse Color"}, - {"value": "specular_light", "label": "Specular Light"}, - {"value": "specular_color", "label": "Specular Color"}, - {"value": "volume_light", "label": "Volume Light"}, + {"value": "specular_light", "label": "Specular (Glossy) Light/Direct"}, + { + "value": "specular_indirect", + "label": "Specular (Glossy) Indirect (Cycles Only)" + }, + {"value": "specular_color", "label": "Specular (Glossy) Color"}, + { + "value": "transmission_light", + "label": "Transmission Light/Direct (Cycles Only)" + }, + { + "value": "transmission_indirect", + "label": "Transmission Indirect (Cycles Only)" + }, + { + "value": "transmission_color", + "label": "Transmission Color (Cycles Only)" + }, + {"value": "volume_light", "label": "Volume Light/Direct"}, + {"value": "volume_indirect", "label": "Volume Indirect (Cycles Only)"}, {"value": "emission", "label": "Emission"}, {"value": "environment", "label": "Environment"}, - {"value": "shadow", "label": "Shadow"}, + {"value": "shadow", "label": "Shadow/Shadow Catcher"}, {"value": "ao", "label": "Ambient Occlusion"}, - {"value": "denoising", "label": "Denoising"}, - {"value": "volume_direct", "label": "Direct Volumetric Scattering"}, - {"value": "volume_indirect", "label": "Indirect Volumetric Scattering"} + {"value": "bloom", "label": "Bloom (Eevee Only)"}, + {"value": "transparent", "label": "Transparent (Eevee Only)"}, + {"value": "cryptomatte_object", "label": "Cryptomatte Object"}, + {"value": "cryptomatte_material", "label": "Cryptomatte Material"}, + {"value": "cryptomatte_asset", "label": "Cryptomatte Asset"}, + { + "value": "cryptomatte_accurate", + "label": "Cryptomatte Accurate Mode (Eevee Only)" + }, ] @@ -81,6 +122,14 @@ class RenderSettingsModel(BaseSettingsModel): multilayer_exr: bool = SettingsField( title="Multilayer (EXR)" ) + renderer: str = SettingsField( + "CYCLES", + title="Renderer", + enum_resolver=renderers_enum + ) + compositing: bool = SettingsField( + title="Enable Compositing" + ) aov_list: list[str] = SettingsField( default_factory=list, enum_resolver=aov_list_enum, @@ -102,6 +151,8 @@ DEFAULT_RENDER_SETTINGS = { "aov_separator": "underscore", "image_format": "exr", "multilayer_exr": True, - "aov_list": [], + "renderer": "CYCLES", + "compositing": True, + "aov_list": ["combined"], "custom_passes": [] } diff --git a/server_addon/blender/server/version.py b/server_addon/blender/server/version.py index 1276d0254f..0a8da88258 100644 --- a/server_addon/blender/server/version.py +++ 
b/server_addon/blender/server/version.py @@ -1 +1 @@ -__version__ = "0.1.5" +__version__ = "0.1.6" diff --git a/server_addon/create_ayon_addons.py b/server_addon/create_ayon_addons.py index 08443d6588..9553980f5d 100644 --- a/server_addon/create_ayon_addons.py +++ b/server_addon/create_ayon_addons.py @@ -40,6 +40,11 @@ IGNORED_HOSTS = [ IGNORED_MODULES = [] +PACKAGE_PY_TEMPLATE = """name = "{addon_name}" +version = "{addon_version}" +plugin_for = ["ayon_server"] +""" + class ZipFileLongPaths(zipfile.ZipFile): """Allows longer paths in zip files. @@ -144,18 +149,12 @@ def create_addon_zip( output_dir: Path, addon_name: str, addon_version: str, - keep_source: bool + keep_source: bool, ): zip_filepath = output_dir / f"{addon_name}-{addon_version}.zip" + addon_output_dir = output_dir / addon_name / addon_version with ZipFileLongPaths(zip_filepath, "w", zipfile.ZIP_DEFLATED) as zipf: - zipf.writestr( - "manifest.json", - json.dumps({ - "addon_name": addon_name, - "addon_version": addon_version - }) - ) # Add client code content to zip src_root = os.path.normpath(str(addon_output_dir.absolute())) src_root_offset = len(src_root) + 1 @@ -167,9 +166,10 @@ def create_addon_zip( for filename in filenames: src_path = os.path.join(root, filename) if rel_root: - dst_path = os.path.join("addon", rel_root, filename) + dst_path = os.path.join(rel_root, filename) else: - dst_path = os.path.join("addon", filename) + dst_path = filename + zipf.write(src_path, dst_path) if not keep_source: @@ -180,9 +180,8 @@ def create_addon_package( addon_dir: Path, output_dir: Path, create_zip: bool, - keep_source: bool + keep_source: bool, ): - server_dir = addon_dir / "server" addon_version = get_addon_version(addon_dir) addon_output_dir = output_dir / addon_dir.name / addon_version @@ -191,18 +190,21 @@ def create_addon_package( addon_output_dir.mkdir(parents=True) # Copy server content - src_root = os.path.normpath(str(server_dir.absolute())) - src_root_offset = len(src_root) + 1 - for root, _, filenames in os.walk(str(server_dir)): - dst_root = addon_output_dir - if root != src_root: - rel_root = root[src_root_offset:] - dst_root = dst_root / rel_root + package_py = addon_output_dir / "package.py" + addon_name = addon_dir.name + if addon_name == "royal_render": + addon_name = "royalrender" + package_py_content = PACKAGE_PY_TEMPLATE.format( + addon_name=addon_name, addon_version=addon_version + ) - dst_root.mkdir(parents=True, exist_ok=True) - for filename in filenames: - src_path = os.path.join(root, filename) - shutil.copy(src_path, str(dst_root)) + with open(package_py, "w+") as pkg_py: + pkg_py.write(package_py_content) + + server_dir = addon_dir / "server" + shutil.copytree( + server_dir, addon_output_dir / "server", dirs_exist_ok=True + ) if create_zip: create_addon_zip( diff --git a/server_addon/deadline/server/settings/publish_plugins.py b/server_addon/deadline/server/settings/publish_plugins.py index 8abe59674b..10ec8ac95f 100644 --- a/server_addon/deadline/server/settings/publish_plugins.py +++ b/server_addon/deadline/server/settings/publish_plugins.py @@ -287,6 +287,13 @@ class ProcessSubmittedJobOnFarmModel(BaseSettingsModel): default_factory=list, title="Skip integration of representation with ext" ) + families_transfer: list[str] = SettingsField( + default_factory=list, + title=( + "List of family names to transfer\n" + "to generated instances (AOVs for example)." 
+ ) + ) aov_filter: list[AOVFilterSubmodel] = SettingsField( default_factory=list, title="Reviewable products filter", @@ -306,36 +313,38 @@ class PublishPluginsModel(BaseSettingsModel): default_factory=ValidateExpectedFilesModel, title="Validate Expected Files" ) - MayaSubmitDeadline: MayaSubmitDeadlineModel = SettingsField( - default_factory=MayaSubmitDeadlineModel, - title="Maya Submit to deadline") - MaxSubmitDeadline: MaxSubmitDeadlineModel = SettingsField( - default_factory=MaxSubmitDeadlineModel, - title="Max Submit to deadline") - FusionSubmitDeadline: FusionSubmitDeadlineModel = SettingsField( - default_factory=FusionSubmitDeadlineModel, - title="Fusion submit to Deadline") - NukeSubmitDeadline: NukeSubmitDeadlineModel = SettingsField( - default_factory=NukeSubmitDeadlineModel, - title="Nuke Submit to deadline") - HarmonySubmitDeadline: HarmonySubmitDeadlineModel = SettingsField( - default_factory=HarmonySubmitDeadlineModel, - title="Harmony Submit to deadline") AfterEffectsSubmitDeadline: AfterEffectsSubmitDeadlineModel = ( SettingsField( default_factory=AfterEffectsSubmitDeadlineModel, - title="After Effects to deadline" + title="After Effects to deadline", + section="Hosts" ) ) - CelactionSubmitDeadline: CelactionSubmitDeadlineModel = SettingsField( - default_factory=CelactionSubmitDeadlineModel, - title="Celaction Submit Deadline") BlenderSubmitDeadline: BlenderSubmitDeadlineModel = SettingsField( default_factory=BlenderSubmitDeadlineModel, title="Blender Submit Deadline") + CelactionSubmitDeadline: CelactionSubmitDeadlineModel = SettingsField( + default_factory=CelactionSubmitDeadlineModel, + title="Celaction Submit Deadline") + FusionSubmitDeadline: FusionSubmitDeadlineModel = SettingsField( + default_factory=FusionSubmitDeadlineModel, + title="Fusion submit to Deadline") + HarmonySubmitDeadline: HarmonySubmitDeadlineModel = SettingsField( + default_factory=HarmonySubmitDeadlineModel, + title="Harmony Submit to deadline") + MaxSubmitDeadline: MaxSubmitDeadlineModel = SettingsField( + default_factory=MaxSubmitDeadlineModel, + title="Max Submit to deadline") + MayaSubmitDeadline: MayaSubmitDeadlineModel = SettingsField( + default_factory=MayaSubmitDeadlineModel, + title="Maya Submit to deadline") + NukeSubmitDeadline: NukeSubmitDeadlineModel = SettingsField( + default_factory=NukeSubmitDeadlineModel, + title="Nuke Submit to deadline") ProcessSubmittedCacheJobOnFarm: ProcessCacheJobFarmModel = SettingsField( default_factory=ProcessCacheJobFarmModel, - title="Process submitted cache Job on farm.") + title="Process submitted cache Job on farm.", + section="Publish Jobs") ProcessSubmittedJobOnFarm: ProcessSubmittedJobOnFarmModel = SettingsField( default_factory=ProcessSubmittedJobOnFarmModel, title="Process submitted job on farm.") @@ -357,6 +366,65 @@ DEFAULT_DEADLINE_PLUGINS_SETTINGS = { "deadline" ] }, + "AfterEffectsSubmitDeadline": { + "enabled": True, + "optional": False, + "active": True, + "use_published": True, + "priority": 50, + "chunk_size": 10000, + "group": "", + "department": "", + "multiprocess": True + }, + "BlenderSubmitDeadline": { + "enabled": True, + "optional": False, + "active": True, + "use_published": True, + "priority": 50, + "chunk_size": 10, + "group": "none", + "job_delay": "00:00:00:00" + }, + "CelactionSubmitDeadline": { + "enabled": True, + "deadline_department": "", + "deadline_priority": 50, + "deadline_pool": "", + "deadline_pool_secondary": "", + "deadline_group": "", + "deadline_chunk_size": 10, + "deadline_job_delay": "00:00:00:00" + }, + 
"FusionSubmitDeadline": { + "enabled": True, + "optional": False, + "active": True, + "priority": 50, + "chunk_size": 10, + "concurrent_tasks": 1, + "group": "" + }, + "HarmonySubmitDeadline": { + "enabled": True, + "optional": False, + "active": True, + "use_published": True, + "priority": 50, + "chunk_size": 10000, + "group": "", + "department": "" + }, + "MaxSubmitDeadline": { + "enabled": True, + "optional": False, + "active": True, + "use_published": True, + "priority": 50, + "chunk_size": 10, + "group": "none" + }, "MayaSubmitDeadline": { "enabled": True, "optional": False, @@ -376,24 +444,6 @@ DEFAULT_DEADLINE_PLUGINS_SETTINGS = { "pluginInfo": "", "scene_patches": [] }, - "MaxSubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, - "use_published": True, - "priority": 50, - "chunk_size": 10, - "group": "none" - }, - "FusionSubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, - "priority": 50, - "chunk_size": 10, - "concurrent_tasks": 1, - "group": "" - }, "NukeSubmitDeadline": { "enabled": True, "optional": False, @@ -410,47 +460,6 @@ DEFAULT_DEADLINE_PLUGINS_SETTINGS = { "env_search_replace_values": [], "limit_groups": [] }, - "HarmonySubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, - "use_published": True, - "priority": 50, - "chunk_size": 10000, - "group": "", - "department": "" - }, - "AfterEffectsSubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, - "use_published": True, - "priority": 50, - "chunk_size": 10000, - "group": "", - "department": "", - "multiprocess": True - }, - "CelactionSubmitDeadline": { - "enabled": True, - "deadline_department": "", - "deadline_priority": 50, - "deadline_pool": "", - "deadline_pool_secondary": "", - "deadline_group": "", - "deadline_chunk_size": 10, - "deadline_job_delay": "00:00:00:00" - }, - "BlenderSubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, - "use_published": True, - "priority": 50, - "chunk_size": 10, - "group": "none", - "job_delay": "00:00:00:00" - }, "ProcessSubmittedCacheJobOnFarm": { "enabled": True, "deadline_department": "", @@ -468,6 +477,7 @@ DEFAULT_DEADLINE_PLUGINS_SETTINGS = { "deadline_priority": 50, "publishing_script": "", "skip_integration_repre_list": [], + "families_transfer": ["render3d", "render2d", "ftrack", "slate"], "aov_filter": [ { "name": "maya", diff --git a/server_addon/deadline/server/version.py b/server_addon/deadline/server/version.py index 9cb17e7976..c11f861afb 100644 --- a/server_addon/deadline/server/version.py +++ b/server_addon/deadline/server/version.py @@ -1 +1 @@ -__version__ = "0.1.8" +__version__ = "0.1.9" diff --git a/server_addon/max/server/settings/publishers.py b/server_addon/max/server/settings/publishers.py index da782cb494..729b8aa006 100644 --- a/server_addon/max/server/settings/publishers.py +++ b/server_addon/max/server/settings/publishers.py @@ -27,10 +27,21 @@ class ValidateAttributesModel(BaseSettingsModel): return value +class ValidateCameraAttributesModel(BaseSettingsModel): + enabled: bool = SettingsField(title="Enabled") + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") + fov: float = SettingsField(0.0, title="Focal Length") + nearrange: float = SettingsField(0.0, title="Near Range") + farrange: float = SettingsField(0.0, title="Far Range") + nearclip: float = SettingsField(0.0, title="Near Clip") + farclip: float = SettingsField(0.0, title="Far Clip") + + class 
FamilyMappingItemModel(BaseSettingsModel): - product_types: list[str] = SettingsField( + families: list[str] = SettingsField( default_factory=list, - title="Product Types" + title="Families" ) plugins: list[str] = SettingsField( default_factory=list, @@ -54,20 +65,35 @@ class BasicValidateModel(BaseSettingsModel): class PublishersModel(BaseSettingsModel): + ValidateInstanceInContext: BasicValidateModel = SettingsField( + default_factory=BasicValidateModel, + title="Validate Instance In Context", + section="Validators" + ) ValidateFrameRange: BasicValidateModel = SettingsField( default_factory=BasicValidateModel, - title="Validate Frame Range", - section="Validators" + title="Validate Frame Range" ) ValidateAttributes: ValidateAttributesModel = SettingsField( default_factory=ValidateAttributesModel, title="Validate Attributes" ) - + ValidateCameraAttributes: ValidateCameraAttributesModel = SettingsField( + default_factory=ValidateCameraAttributesModel, + title="Validate Camera Attributes", + description=( + "If a camera attribute is set to 0, " + "the system automatically skips checking it." + ) + ) ValidateLoadedPlugin: ValidateLoadedPluginModel = SettingsField( default_factory=ValidateLoadedPluginModel, title="Validate Loaded Plugin" ) + ValidateMeshHasUVs: BasicValidateModel = SettingsField( + default_factory=BasicValidateModel, + title="Validate Mesh Has UVs" + ) ExtractModelObj: BasicValidateModel = SettingsField( default_factory=BasicValidateModel, title="Extract OBJ", @@ -92,6 +118,11 @@ class PublishersModel(BaseSettingsModel): DEFAULT_PUBLISH_SETTINGS = { + "ValidateInstanceInContext": { + "enabled": True, + "optional": True, + "active": True + }, "ValidateFrameRange": { "enabled": True, "optional": True, @@ -101,11 +132,26 @@ DEFAULT_PUBLISH_SETTINGS = { "enabled": False, "attributes": "{}" }, + "ValidateCameraAttributes": { + "enabled": True, + "optional": True, + "active": False, + "fov": 45.0, + "nearrange": 0.0, + "farrange": 1000.0, + "nearclip": 1.0, + "farclip": 1000.0 + }, "ValidateLoadedPlugin": { "enabled": False, "optional": True, "family_plugins_mapping": [] }, + "ValidateMeshHasUVs": { + "enabled": True, + "optional": True, + "active": False + }, "ExtractModelObj": { "enabled": True, "optional": True, diff --git a/server_addon/max/server/version.py b/server_addon/max/server/version.py index bbab0242f6..0a8da88258 100644 --- a/server_addon/max/server/version.py +++ b/server_addon/max/server/version.py @@ -1 +1 @@ -__version__ = "0.1.4" +__version__ = "0.1.6" diff --git a/server_addon/maya/server/settings/loaders.py b/server_addon/maya/server/settings/loaders.py index 15d4275b80..418a7046ae 100644 --- a/server_addon/maya/server/settings/loaders.py +++ b/server_addon/maya/server/settings/loaders.py @@ -1,5 +1,5 @@ from ayon_server.settings import BaseSettingsModel, SettingsField -from ayon_server.types import ColorRGBA_uint8 +from ayon_server.types import ColorRGB_float, ColorRGBA_uint8 class ColorsSetting(BaseSettingsModel): @@ -35,6 +35,54 @@ class ColorsSetting(BaseSettingsModel): (99, 206, 220, 1.0), title="Yeti Cache:") yetiRig: ColorRGBA_uint8 = SettingsField( (0, 205, 125, 1.0), title="Yeti Rig:") + # model: ColorRGB_float = SettingsField( + # (0.82, 0.52, 0.12), title="Model:" + # ) + # rig: ColorRGB_float = SettingsField( + # (0.23, 0.89, 0.92), title="Rig:" + # ) + # pointcache: ColorRGB_float = SettingsField( + # (0.37, 0.82, 0.12), title="Pointcache:" + # ) + # animation: ColorRGB_float = SettingsField( + # (0.37, 0.82, 0.12), title="Animation:" 
# ) + # ass: ColorRGB_float = SettingsField( + # (0.98, 0.53, 0.21), title="Arnold StandIn:" + # ) + # camera: ColorRGB_float = SettingsField( + # (0.53, 0.45, 0.96), title="Camera:" + # ) + # fbx: ColorRGB_float = SettingsField( + # (0.84, 0.65, 1.0), title="FBX:" + # ) + # mayaAscii: ColorRGB_float = SettingsField( + # (0.26, 0.68, 1.0), title="Maya Ascii:" + # ) + # mayaScene: ColorRGB_float = SettingsField( + # (0.26, 0.68, 1.0), title="Maya Scene:" + # ) + # setdress: ColorRGB_float = SettingsField( + # (1.0, 0.98, 0.35), title="Set Dress:" + # ) + # layout: ColorRGB_float = SettingsField( + # (1.0, 0.98, 0.35), title="Layout:" + # ) + # vdbcache: ColorRGB_float = SettingsField( + # (0.98, 0.21, 0.0), title="VDB Cache:" + # ) + # vrayproxy: ColorRGB_float = SettingsField( + # (1.0, 0.59, 0.05), title="VRay Proxy:" + # ) + # vrayscene_layer: ColorRGB_float = SettingsField( + # (1.0, 0.59, 0.05), title="VRay Scene:" + # ) + # yeticache: ColorRGB_float = SettingsField( + # (0.39, 0.81, 0.86), title="Yeti Cache:" + # ) + # yetiRig: ColorRGB_float = SettingsField( + # (0.0, 0.80, 0.49), title="Yeti Rig:" + # ) class ReferenceLoaderModel(BaseSettingsModel): @@ -67,54 +115,38 @@ class LoadersModel(BaseSettingsModel): DEFAULT_LOADERS_SETTING = { "colors": { - "model": [ - 209, 132, 30, 1.0 - ], - "rig": [ - 59, 226, 235, 1.0 - ], - "pointcache": [ - 94, 209, 30, 1.0 - ], - "animation": [ - 94, 209, 30, 1.0 - ], - "ass": [ - 249, 135, 53, 1.0 - ], - "camera": [ - 136, 114, 244, 1.0 - ], - "fbx": [ - 215, 166, 255, 1.0 - ], - "mayaAscii": [ - 67, 174, 255, 1.0 - ], - "mayaScene": [ - 67, 174, 255, 1.0 - ], - "setdress": [ - 255, 250, 90, 1.0 - ], - "layout": [ - 255, 250, 90, 1.0 - ], - "vdbcache": [ - 249, 54, 0, 1.0 - ], - "vrayproxy": [ - 255, 150, 12, 1.0 - ], - "vrayscene_layer": [ - 255, 150, 12, 1.0 - ], - "yeticache": [ - 99, 206, 220, 1.0 - ], - "yetiRig": [ - 0, 205, 125, 1.0 - ] + "model": [209, 132, 30, 1.0], + "rig": [59, 226, 235, 1.0], + "pointcache": [94, 209, 30, 1.0], + "animation": [94, 209, 30, 1.0], + "ass": [249, 135, 53, 1.0], + "camera": [136, 114, 244, 1.0], + "fbx": [215, 166, 255, 1.0], + "mayaAscii": [67, 174, 255, 1.0], + "mayaScene": [67, 174, 255, 1.0], + "setdress": [255, 250, 90, 1.0], + "layout": [255, 250, 90, 1.0], + "vdbcache": [249, 54, 0, 1.0], + "vrayproxy": [255, 150, 12, 1.0], + "vrayscene_layer": [255, 150, 12, 1.0], + "yeticache": [99, 206, 220, 1.0], + "yetiRig": [0, 205, 125, 1.0] + # "model": [0.82, 0.52, 0.12], + # "rig": [0.23, 0.89, 0.92], + # "pointcache": [0.37, 0.82, 0.12], + # "animation": [0.37, 0.82, 0.12], + # "ass": [0.98, 0.53, 0.21], + # "camera":[0.53, 0.45, 0.96], + # "fbx": [0.84, 0.65, 1.0], + # "mayaAscii": [0.26, 0.68, 1.0], + # "mayaScene": [0.26, 0.68, 1.0], + # "setdress": [1.0, 0.98, 0.35], + # "layout": [1.0, 0.98, 0.35], + # "vdbcache": [0.98, 0.21, 0.0], + # "vrayproxy": [1.0, 0.59, 0.05], + # "vrayscene_layer": [1.0, 0.59, 0.05], + # "yeticache": [0.39, 0.81, 0.86], + # "yetiRig": [0.0, 0.80, 0.49], }, "reference_loader": { "namespace": "{folder[name]}_{product[name]}_##_", diff --git a/server_addon/maya/server/settings/publish_playblast.py b/server_addon/maya/server/settings/publish_playblast.py index 0461a18cc8..39f48bacbe 100644 --- a/server_addon/maya/server/settings/publish_playblast.py +++ b/server_addon/maya/server/settings/publish_playblast.py @@ -6,7 +6,7 @@ from ayon_server.settings import ( ensure_unique_names, task_types_enum, ) -from ayon_server.types import ColorRGBA_uint8 +from ayon_server.types import 
ColorRGBA_uint8, ColorRGB_float def hardware_falloff_enum(): @@ -57,6 +57,9 @@ class DisplayOptionsSetting(BaseSettingsModel): background: ColorRGBA_uint8 = SettingsField( (125, 125, 125, 1.0), title="Background Color" ) + # background: ColorRGB_float = SettingsField( + # (0.5, 0.5, 0.5), title="Background Color" + # ) displayGradient: bool = SettingsField( True, title="Display background gradient" ) @@ -66,6 +69,12 @@ class DisplayOptionsSetting(BaseSettingsModel): backgroundBottom: ColorRGBA_uint8 = SettingsField( (125, 125, 125, 1.0), title="Background Bottom" ) + # backgroundTop: ColorRGB_float = SettingsField( + # (0.5, 0.5, 0.5), title="Background Top" + # ) + # backgroundBottom: ColorRGB_float = SettingsField( + # (0.5, 0.5, 0.5), title="Background Bottom" + # ) class GenericSetting(BaseSettingsModel): @@ -282,24 +291,12 @@ DEFAULT_PLAYBLAST_SETTING = { }, "DisplayOptions": { "override_display": True, - "background": [ - 125, - 125, - 125, - 1.0 - ], - "backgroundBottom": [ - 125, - 125, - 125, - 1.0 - ], - "backgroundTop": [ - 125, - 125, - 125, - 1.0 - ], + "background": [125, 125, 125, 1.0], + "backgroundBottom": [125, 125, 125, 1.0], + "backgroundTop": [125, 125, 125, 1.0], + # "background": [0.5, 0.5, 0.5], + # "backgroundBottom": [0.5, 0.5, 0.5], + # "backgroundTop": [0.5, 0.5, 0.5], "displayGradient": True }, "Generic": { diff --git a/server_addon/maya/server/version.py b/server_addon/maya/server/version.py index 684d830189..8202425a2d 100644 --- a/server_addon/maya/server/version.py +++ b/server_addon/maya/server/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring addon version.""" -__version__ = "0.1.8" +__version__ = "0.1.9" diff --git a/server_addon/photoshop/server/settings/publish_plugins.py b/server_addon/photoshop/server/settings/publish_plugins.py index c4a392d490..d04faaf53a 100644 --- a/server_addon/photoshop/server/settings/publish_plugins.py +++ b/server_addon/photoshop/server/settings/publish_plugins.py @@ -62,9 +62,9 @@ class CollectColorCodedInstancesPlugin(BaseSettingsModel): enum_resolver=lambda: create_flatten_image_enum, ) - flatten_product_type_template: str = SettingsField( + flatten_product_name_template: str = SettingsField( "", - title="Subset template for flatten image" + title="Product name template for flatten image" ) color_code_mapping: list[ColorCodeMappings] = SettingsField( @@ -178,7 +178,7 @@ class PhotoshopPublishPlugins(BaseSettingsModel): DEFAULT_PUBLISH_SETTINGS = { "CollectColorCodedInstances": { "create_flatten_image": "no", - "flatten_product_type_template": "", + "flatten_product_name_template": "", "color_code_mapping": [] }, "CollectReview": { diff --git a/server_addon/photoshop/server/version.py b/server_addon/photoshop/server/version.py index a242f0e757..df0c92f1e2 100644 --- a/server_addon/photoshop/server/version.py +++ b/server_addon/photoshop/server/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring addon version.""" -__version__ = "0.1.1" +__version__ = "0.1.2" diff --git a/server_addon/tvpaint/server/settings/publish_plugins.py b/server_addon/tvpaint/server/settings/publish_plugins.py index 0623524c92..0d978e5714 100644 --- a/server_addon/tvpaint/server/settings/publish_plugins.py +++ b/server_addon/tvpaint/server/settings/publish_plugins.py @@ -1,5 +1,5 @@ from ayon_server.settings import BaseSettingsModel, SettingsField -from ayon_server.types import ColorRGBA_uint8 +from ayon_server.types import ColorRGBA_uint8, ColorRGB_uint8 class CollectRenderInstancesModel(BaseSettingsModel): 
@@ -10,10 +10,12 @@ class CollectRenderInstancesModel(BaseSettingsModel): class ExtractSequenceModel(BaseSettingsModel): """Review BG color is used for whole scene review and for thumbnails.""" - # TODO Use alpha color review_bg: ColorRGBA_uint8 = SettingsField( (255, 255, 255, 1.0), title="Review BG color") + # review_bg: ColorRGB_uint8 = SettingsField( + # (255, 255, 255), + # title="Review BG color") class ValidatePluginModel(BaseSettingsModel): @@ -100,6 +102,7 @@ DEFAULT_PUBLISH_SETTINGS = { "ignore_render_pass_transparency": False }, "ExtractSequence": { + # "review_bg": [255, 255, 255] "review_bg": [255, 255, 255, 1.0] }, "ValidateProjectSettings": { diff --git a/server_addon/tvpaint/server/version.py b/server_addon/tvpaint/server/version.py index 485f44ac21..b3f4756216 100644 --- a/server_addon/tvpaint/server/version.py +++ b/server_addon/tvpaint/server/version.py @@ -1 +1 @@ -__version__ = "0.1.1" +__version__ = "0.1.2"
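For reference, below is a minimal usage sketch of the `open_template_ui` helper that this patch adds and exports from `ayon_core.tools.workfile_template_build`. The helper only needs an object exposing `open_template()` plus a Qt widget to parent its dialogs: it first asks the user to confirm losing unsaved changes, then calls `builder.open_template()` and shows a critical dialog with the traceback if opening fails. The `_DemoBuilder` class and the standalone `QApplication` are illustrative assumptions for the sketch, not part of the patch; a real host passes its own template builder and main window, and `ayon_core` plus a Qt binding must be installed.

```python
# Illustrative sketch only; not part of the patch above.
# `_DemoBuilder` stands in for a host's real template builder,
# which only needs to expose `open_template()` for this helper.
from qtpy import QtWidgets

from ayon_core.tools.workfile_template_build import open_template_ui


class _DemoBuilder:
    def open_template(self):
        print("Workfile template opened.")


app = QtWidgets.QApplication([])
main_window = QtWidgets.QMainWindow()

# Shows the "Opening template" confirmation; on "Yes" it calls
# builder.open_template() and reports any traceback in a dialog.
open_template_ui(_DemoBuilder(), main_window)
```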